/dev/null) ; do
+ for i in $(find dojo/fixtures/extra_*.json 2>/dev/null | sort -n) ; do
echo "Loading $i"
- python3 manage.py loaddata ${i%.*}
+ python3 manage.py loaddata "${i%.*}"
done
echo "Installing watson search index"
@@ -118,6 +176,6 @@ EOD
echo "Migration of textquestions for surveys"
python3 manage.py migrate_textquestions
- initialize_test_types
-
+ create_announcement_banner
+ initialize_data
fi
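For orientation, a usage sketch of the fixture loop above; the fixture file name is hypothetical, but the `extra_*.json` pattern and the extension-stripping `loaddata` call mirror the loop:

```
# Hypothetical fixture file; the loop picks up any dojo/fixtures/extra_*.json
cp my_product_types.json dojo/fixtures/extra_0010_product_types.json
# On the next initializer run the loop strips the extension and executes:
#   python3 manage.py loaddata dojo/fixtures/extra_0010_product_types
```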
diff --git a/docker/entrypoint-integration-tests.sh b/docker/entrypoint-integration-tests.sh
index abb009a040d..5a75ed6b5b7 100755
--- a/docker/entrypoint-integration-tests.sh
+++ b/docker/entrypoint-integration-tests.sh
@@ -1,20 +1,22 @@
#!/bin/bash
+. /secret-file-loader.sh
+
echo "Testing DefectDojo Service"
echo "Waiting max 60s for services to start"
# Wait for services to become available
COUNTER=0
while [ $COUNTER -lt 10 ]; do
- curl -s -o "/dev/null" $DD_BASE_URL -m 120
- CR=$(curl --insecure -s -m 10 -I "${DD_BASE_URL}login?next=/" | egrep "^HTTP" | cut -d' ' -f2)
+ curl -s -o "/dev/null" "$DD_BASE_URL" -m 120
+ CR=$(curl --insecure -s -m 10 -I "${DD_BASE_URL}login?next=/" | grep -E "^HTTP" | cut -d' ' -f2)
if [ "$CR" == 200 ]; then
echo "Succesfully displayed login page, starting integration tests"
break
fi
echo "Waiting: cannot display login screen; got HTTP code $CR"
sleep 10
- let COUNTER=COUNTER+1
+    (( COUNTER++ )) || true
done
if [ $COUNTER -gt 10 ]; then
@@ -22,7 +24,13 @@ if [ $COUNTER -gt 10 ]; then
exit 1
fi
-export CHROMEDRIVER=$(find /opt/chrome-driver -name chromedriver)
+CHROMEDRIVER=$(find /opt/chrome-driver -name chromedriver)
+export CHROMEDRIVER
+CHROME_PATH=/opt/chrome/chrome
+export CHROME_PATH
+
+# We are strict about Warnings during testing
+export PYTHONWARNINGS=error
# Run available unittests with a simple setup
# All available Integrationtest Scripts are activated below
@@ -31,184 +39,226 @@ export CHROMEDRIVER=$(find /opt/chrome-driver -name chromedriver)
# Exits with status code of 1
function fail() {
- echo "Error: $1 test failed\n"
+ printf 'Error: %s test failed\n' "$1"
exit 1
}
function success() {
- echo "Success: $1 test passed\n"
+ printf 'Success: %s test passed\n' "$1"
}
echo "IT FILENAME: $DD_INTEGRATION_TEST_FILENAME"
-if [[ ! -z "$DD_INTEGRATION_TEST_FILENAME" ]]; then
- test=$DD_INTEGRATION_TEST_FILENAME
- echo "Running: $test"
- if python3 $DD_INTEGRATION_TEST_FILENAME; then
- success $test
+if [[ -n "$DD_INTEGRATION_TEST_FILENAME" ]]; then
+ if [[ "$DD_INTEGRATION_TEST_FILENAME" == "openapi-validatator" ]]; then
+ test="OpenAPI schema validation"
+ echo "Running: $test"
+ if java -jar /usr/local/bin/openapi-generator-cli.jar validate -i "$DD_BASE_URL/api/v2/oa3/schema/?format=json" --recommend; then
+ success "$test"
+ else
+ fail "$test"
+ fi
else
- fail $test
+ test=$DD_INTEGRATION_TEST_FILENAME
+ echo "Running: $test"
+ if python3 "$DD_INTEGRATION_TEST_FILENAME"; then
+ success "$test"
+ else
+ fail "$test"
+ fi
fi
else
test="Finding integration tests"
echo "Running: $test"
if python3 tests/finding_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Report Builder tests"
echo "Running: $test"
if python3 tests/report_builder_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Notes integration tests"
echo "Running: $test"
if python3 tests/notes_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Regulation integration tests"
echo "Running: $test"
if python3 tests/regulations_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product type integration tests"
echo "Running: $test"
if python3 tests/product_type_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product integration tests"
echo "Running: $test"
if python3 tests/product_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Endpoint integration tests"
echo "Running: $test"
if python3 tests/endpoint_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Engagement integration tests"
echo "Running: $test"
if python3 tests/engagement_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Environment integration tests"
echo "Running: $test"
if python3 tests/environment_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Test integration tests"
echo "Running: $test"
if python3 tests/test_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="User integration tests"
echo "Running: $test"
if python3 tests/user_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Group integration tests"
echo "Running: $test"
if python3 tests/group_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product Group integration tests"
echo "Running: $test"
if python3 tests/product_group_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product Type Group integration tests"
echo "Running: $test"
if python3 tests/product_type_group_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product member integration tests"
echo "Running: $test"
if python3 tests/product_member_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Product type member integration tests"
echo "Running: $test"
if python3 tests/product_type_member_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Ibm Appscan integration test"
echo "Running: $test"
if python3 tests/ibm_appscan_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Search integration test"
echo "Running: $test"
if python3 tests/search_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="File Upload tests"
echo "Running: $test"
if python3 tests/file_test.py ; then
- success $test
+ success "$test"
else
- fail $test
+ fail "$test"
fi
test="Dedupe integration tests"
echo "Running: $test"
if python3 tests/dedupe_test.py ; then
- success $test
+ success "$test"
+ else
+ fail "$test"
+ fi
+
+ test="Global Announcement Banner tests"
+ echo "Running: $test"
+ if python3 tests/announcement_banner_test.py ; then
+ success "$test"
else
- fail $test
+ fail "$test"
+ fi
+
+ test="Close Old Findings with dedupe integration tests"
+ echo "Running: $test"
+ if python3 tests/close_old_findings_dedupe_test.py ; then
+ success "$test"
+ else
+ fail "$test"
+ fi
+
+ test="Close Old Findings without dedupe integration tests"
+ echo "Running: $test"
+ if python3 tests/close_old_findings_test.py ; then
+ success "$test"
+ else
+ fail "$test"
+ fi
+
+ test="False Positive History tests"
+ echo "Running: $test"
+ if python3 tests/false_positive_history_test.py ; then
+ success "$test"
+ else
+ fail "$test"
fi
# The below tests are commented out because they are still an unstable work in progress
@@ -238,5 +288,22 @@ else
# else
# echo "Error: Zap integration test failed"; exit 1
# fi
+
+ test="Tool Config integration tests"
+ echo "Running: $test"
+ if python3 tests/tool_config.py ; then
+ success "$test"
+ else
+ fail "$test"
+ fi
+
+ test="OpenAPI schema validation"
+ echo "Running: $test"
+ if java -jar /usr/local/bin/openapi-generator-cli.jar validate -i "$DD_BASE_URL/api/v2/oa3/schema/?format=json" --recommend; then
+ success "$test"
+ else
+ fail "$test"
+ fi
+
exec echo "Done Running all configured integration tests."
fi
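As a usage sketch of the branch added above: setting `DD_INTEGRATION_TEST_FILENAME` selects a single test, and the special value `openapi-validatator` (spelled as in the script) runs only the schema validation:

```
# Run a single integration test (path taken from the list in this script)
export DD_INTEGRATION_TEST_FILENAME=tests/notes_test.py
# ...or run only the OpenAPI schema validation
export DD_INTEGRATION_TEST_FILENAME=openapi-validatator
```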
diff --git a/docker/entrypoint-nginx.sh b/docker/entrypoint-nginx.sh
index 9a3b93f5be3..c1e542052f2 100755
--- a/docker/entrypoint-nginx.sh
+++ b/docker/entrypoint-nginx.sh
@@ -26,7 +26,8 @@ fi
if [ "${METRICS_HTTP_AUTH_PASSWORD}" != "" ]; then
sed -i "s/#auth_basic/auth_basic/g;" $NGINX_CONFIG
rm -rf /etc/nginx/.htpasswd
- echo -n $METRICS_HTTP_AUTH_USER:$(openssl passwd -apr1 $METRICS_HTTP_AUTH_PASSWORD) >> /etc/nginx/.htpasswd
+ openssl_passwd=$(openssl passwd -apr1 "$METRICS_HTTP_AUTH_PASSWORD")
+ echo "$METRICS_HTTP_AUTH_USER":"$openssl_passwd" >> /etc/nginx/.htpasswd
echo "Basic auth is on for user ${HTTP_AUTH_LOGIN}..."
else
echo "Basic auth is off (HTTP_AUTH_PASSWORD not provided)"
diff --git a/docker/entrypoint-unit-tests-devDocker.sh b/docker/entrypoint-unit-tests-devDocker.sh
index 758705f7e76..96f9906c177 100755
--- a/docker/entrypoint-unit-tests-devDocker.sh
+++ b/docker/entrypoint-unit-tests-devDocker.sh
@@ -6,10 +6,18 @@ set -x
set -e
set -v
+. /secret-file-loader.sh
+
cd /app
# Unset the database URL so that we can force the DD_TEST_DATABASE_NAME (see django "DATABASES" configuration in settings.dist.py)
unset DD_DATABASE_URL
+# Unset the celery broker URL so that we can force the other DD_CELERY_BROKER settings
+unset DD_CELERY_BROKER_URL
+
+# We are strict about Warnings during testing
+export PYTHONWARNINGS=error
+
python3 manage.py makemigrations dojo
python3 manage.py migrate
@@ -26,8 +34,8 @@ You made changes to the REST API without applying the correct schema annotations
These schema annotations are needed to allow for the correct generation of
the OpenAPI (v3) schema's and documentation.
-Review the warnings generated by drf-spectacular and see `dojo/api_v2/views.py`
-and/or `dojo/api_v2/serializers.py`.
+Review the warnings generated by drf-spectacular and see "dojo/api_v2/views.py"
+and/or "dojo/api_v2/serializers.py".
You can check for warnings locally by running
diff --git a/docker/entrypoint-unit-tests.sh b/docker/entrypoint-unit-tests.sh
index 9c0a51d6502..16a5e6c29b5 100755
--- a/docker/entrypoint-unit-tests.sh
+++ b/docker/entrypoint-unit-tests.sh
@@ -6,10 +6,19 @@
# set -e
# set -v
-cd /app
+
+. /secret-file-loader.sh
+
+cd /app || exit
# Unset the database URL so that we can force the DD_TEST_DATABASE_NAME (see django "DATABASES" configuration in settings.dist.py)
unset DD_DATABASE_URL
+# Unset the celery broker URL so that we can force the other DD_CELERY_BROKER settings
+unset DD_CELERY_BROKER_URL
+
+# We are strict about Warnings during testing
+export PYTHONWARNINGS=error
+
# TARGET_SETTINGS_FILE=dojo/settings/settings.py
# if [ ! -f ${TARGET_SETTINGS_FILE} ]; then
# echo "Creating settings.py"
@@ -26,8 +35,8 @@ You made changes to the REST API without applying the correct schema annotations
These schema annotations are needed to allow for the correct generation of
the OpenAPI (v3) schema's and documentation.
-Review the warnings generated by drf-spectacular and see `dojo/api_v2/views.py`
-and/or `dojo/api_v2/serializers.py`.
+Review the warnings generated by drf-spectacular and see "dojo/api_v2/views.py"
+and/or "dojo/api_v2/serializers.py".
You can check for warnings locally by running
diff --git a/docker/entrypoint-uwsgi-dev.sh b/docker/entrypoint-uwsgi-dev.sh
index 8be7c6ce449..cb6aca58c53 100755
--- a/docker/entrypoint-uwsgi-dev.sh
+++ b/docker/entrypoint-uwsgi-dev.sh
@@ -1,13 +1,16 @@
#!/bin/sh
+. /secret-file-loader.sh
-cd /app
+
+cd /app || exit
# Full list of uwsgi options: https://uwsgi-docs.readthedocs.io/en/latest/Options.html
# --lazy-apps required for debugging --> https://uwsgi-docs.readthedocs.io/en/latest/articles/TheArtOfGracefulReloading.html?highlight=lazy-apps#preforking-vs-lazy-apps-vs-lazy
+DD_UWSGI_LOGFORMAT_DEFAULT='[pid: %(pid)|app: -|req: -/-] %(addr) (%(dd_user)) {%(vars) vars in %(pktsize) bytes} [%(ctime)] %(method) %(uri) => generated %(rsize) bytes in %(msecs) msecs (%(proto) %(status)) %(headers) headers in %(hsize) bytes (%(switches) switches on core %(core))'
-if [ ${DD_DEBUG} == "True" ]; then
+if [ "${DD_DEBUG}" = "True" ]; then
echo "Debug mode enabled, reducing # of processes and threads to 1"
DD_UWSGI_NUM_OF_PROCESSES=1
DD_UWSGI_NUM_OF_THREADS=1
@@ -18,11 +21,12 @@ exec uwsgi \
--protocol uwsgi \
--wsgi dojo.wsgi:application \
--enable-threads \
- --processes ${DD_UWSGI_NUM_OF_PROCESSES:-2} \
- --threads ${DD_UWSGI_NUM_OF_THREADS:-2} \
+ --processes "${DD_UWSGI_NUM_OF_PROCESSES:-2}" \
+ --threads "${DD_UWSGI_NUM_OF_THREADS:-2}" \
--reload-mercy 1 \
--worker-reload-mercy 1 \
--py-autoreload 1 \
--buffer-size="${DD_UWSGI_BUFFER_SIZE:-8192}" \
--lazy-apps \
- --touch-reload="/app/dojo/setting/settings.py"
+ --touch-reload="/app/dojo/setting/settings.py" \
+ --logformat "${DD_UWSGI_LOGFORMAT:-$DD_UWSGI_LOGFORMAT_DEFAULT}"
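A sketch of overriding the new log format; the format string below is a hypothetical, reduced variant built from the same uwsgi logformat variables as the default above:

```
export DD_UWSGI_LOGFORMAT='%(addr) %(method) %(uri) => %(status) in %(msecs) msecs'
```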
diff --git a/docker/entrypoint-uwsgi.sh b/docker/entrypoint-uwsgi.sh
index 5aa7a2baae7..3604ceb4c33 100755
--- a/docker/entrypoint-uwsgi.sh
+++ b/docker/entrypoint-uwsgi.sh
@@ -1,30 +1,18 @@
#!/bin/sh
-# Allow for bind-mount setting.py overrides
-FILE=/app/docker/extra_settings/settings.dist.py
-if test -f "$FILE"; then
- echo "============================================================"
- echo " Overriding DefectDojo's settings.dist.py with $FILE."
- echo "============================================================"
- cp "$FILE" /app/dojo/settings/settings.dist.py
-fi
+. /secret-file-loader.sh
-# Allow for bind-mount setting.py overrides
-FILE=/app/docker/extra_settings/settings.py
-if test -f "$FILE"; then
+# Allow for bind-mount multiple settings.py overrides
+FILES=$(ls /app/docker/extra_settings/* 2>/dev/null)
+NUM_FILES=$(echo "$FILES" | wc -w)
+if [ "$NUM_FILES" -gt 0 ]; then
+ COMMA_LIST=$(echo "$FILES" | tr -s '[:blank:]' ', ')
echo "============================================================"
- echo " Overriding DefectDojo's settings.py with $FILE."
+ echo " Overriding DefectDojo's local_settings.py with multiple"
+ echo " Files: $COMMA_LIST"
echo "============================================================"
- cp "$FILE" /app/dojo/settings/settings.py
-fi
-
-# Allow for bind-mount setting.py overrides
-FILE=/app/docker/extra_settings/local_settings.py
-if test -f "$FILE"; then
- echo "============================================================"
- echo " Overriding DefectDojo's local_settings.py with $FILE."
- echo "============================================================"
- cp "$FILE" /app/dojo/settings/local_settings.py
+ cp /app/docker/extra_settings/* /app/dojo/settings/
+ rm -f /app/dojo/settings/README.md
fi
umask 0002
@@ -32,14 +20,16 @@ umask 0002
# do the check with Django stack
python3 manage.py check
+DD_UWSGI_LOGFORMAT_DEFAULT='[pid: %(pid)|app: -|req: -/-] %(addr) (%(dd_user)) {%(vars) vars in %(pktsize) bytes} [%(ctime)] %(method) %(uri) => generated %(rsize) bytes in %(msecs) msecs (%(proto) %(status)) %(headers) headers in %(hsize) bytes (%(switches) switches on core %(core))'
+
exec uwsgi \
"--${DD_UWSGI_MODE}" "${DD_UWSGI_ENDPOINT}" \
--protocol uwsgi \
--enable-threads \
- --processes ${DD_UWSGI_NUM_OF_PROCESSES:-2} \
- --threads ${DD_UWSGI_NUM_OF_THREADS:-2} \
+ --processes "${DD_UWSGI_NUM_OF_PROCESSES:-2}" \
+ --threads "${DD_UWSGI_NUM_OF_THREADS:-2}" \
--wsgi dojo.wsgi:application \
--buffer-size="${DD_UWSGI_BUFFER_SIZE:-8192}" \
- --http 0.0.0.0:8081 --http-to ${DD_UWSGI_ENDPOINT}
- # HTTP endpoint is enabled for Kubernetes liveness checks. It should not be exposed as a serivce.
-
+ --http 0.0.0.0:8081 --http-to "${DD_UWSGI_ENDPOINT}" \
+ --logformat "${DD_UWSGI_LOGFORMAT:-$DD_UWSGI_LOGFORMAT_DEFAULT}"
+ # HTTP endpoint is enabled for Kubernetes liveness checks. It should not be exposed as a service.
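A usage sketch for the multi-file override above, assuming `docker/extra_settings/` is bind-mounted into the container as `/app/docker/extra_settings/` (file names hypothetical); every file except README.md is copied into `dojo/settings/` at startup:

```
cp my_local_settings.py docker/extra_settings/local_settings.py
cp feature_flags.py docker/extra_settings/
# Recreate the uwsgi container so the entrypoint re-runs and copies the files
docker compose restart uwsgi
```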
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
index acd1ff490ff..e1173f633d0 100755
--- a/docker/entrypoint.sh
+++ b/docker/entrypoint.sh
@@ -1,7 +1,9 @@
#!/bin/sh
+. /secret-file-loader.sh
+
# Waits for the database to come up.
-./docker/wait-for-it.sh $DD_DATABASE_HOST:$DD_DATABASE_PORT
+./docker/wait-for-it.sh "$DD_DATABASE_HOST":"$DD_DATABASE_PORT"
if [ -z "$DD_DATABASE_URL" ]; then
if [ -z "$DD_DATABASE_PASSWORD" ]; then
diff --git a/docker/environments/mysql-rabbitmq.env b/docker/environments/mysql-rabbitmq.env
new file mode 100644
index 00000000000..0b41a9a5d3d
--- /dev/null
+++ b/docker/environments/mysql-rabbitmq.env
@@ -0,0 +1,17 @@
+# MySQL character set is explicitly set to utf8mb4
+DD_DATABASE_URL=mysql://defectdojo:defectdojo@mysql:3306/defectdojo?charset=utf8mb4
+DD_DATABASE_ENGINE=django.db.backends.mysql
+DD_DATABASE_HOST=mysql
+DD_DATABASE_PORT=3306
+
+DD_DATABASE_NAME=defectdojo
+DD_DATABASE_USER=defectdojo
+DD_DATABASE_PASSWORD=defectdojo
+
+DD_TEST_DATABASE_NAME=test_defectdojo
+DD_TEST_DATABASE_URL=mysql://defectdojo:defectdojo@mysql:3306/test_defectdojo?charset=utf8mb4
+
+DD_CELERY_BROKER_URL=amqp://guest:guest@rabbitmq:5672//
+
+DD_DOCKERCOMPOSE_DATABASE=mysql
+DD_DOCKERCOMPOSE_BROKER=rabbitmq
diff --git a/docker/environments/mysql-redis.env b/docker/environments/mysql-redis.env
new file mode 100644
index 00000000000..0d1ee431b37
--- /dev/null
+++ b/docker/environments/mysql-redis.env
@@ -0,0 +1,16 @@
+DD_DATABASE_URL=mysql://defectdojo:defectdojo@mysql:3306/defectdojo?charset=utf8mb4
+DD_DATABASE_ENGINE=django.db.backends.mysql
+DD_DATABASE_HOST=mysql
+DD_DATABASE_PORT=3306
+
+DD_DATABASE_NAME=defectdojo
+DD_DATABASE_USER=defectdojo
+DD_DATABASE_PASSWORD=defectdojo
+
+DD_TEST_DATABASE_NAME=test_defectdojo
+DD_TEST_DATABASE_URL=mysql://defectdojo:defectdojo@mysql:3306/test_defectdojo?charset=utf8mb4
+
+DD_CELERY_BROKER_URL=redis://redis:6379/0
+
+DD_DOCKERCOMPOSE_DATABASE=mysql
+DD_DOCKERCOMPOSE_BROKER=redis
diff --git a/docker/environments/postgres-rabbitmq.env b/docker/environments/postgres-rabbitmq.env
new file mode 100644
index 00000000000..28e0c16b91b
--- /dev/null
+++ b/docker/environments/postgres-rabbitmq.env
@@ -0,0 +1,16 @@
+DD_DATABASE_URL=postgresql://defectdojo:defectdojo@postgres:5432/defectdojo
+DD_DATABASE_ENGINE=django.db.backends.postgresql
+DD_DATABASE_HOST=postgres
+DD_DATABASE_PORT=5432
+
+DD_DATABASE_NAME=defectdojo
+DD_DATABASE_USER=defectdojo
+DD_DATABASE_PASSWORD=defectdojo
+
+DD_TEST_DATABASE_NAME=test_defectdojo
+DD_TEST_DATABASE_URL=postgresql://defectdojo:defectdojo@postgres:5432/test_defectdojo
+
+DD_CELERY_BROKER_URL=amqp://guest:guest@rabbitmq:5672//
+
+DD_DOCKERCOMPOSE_DATABASE=postgres
+DD_DOCKERCOMPOSE_BROKER=rabbitmq
diff --git a/docker/environments/postgres-redis.env b/docker/environments/postgres-redis.env
new file mode 100644
index 00000000000..defb62374bf
--- /dev/null
+++ b/docker/environments/postgres-redis.env
@@ -0,0 +1,16 @@
+DD_DATABASE_URL=postgresql://defectdojo:defectdojo@postgres:5432/defectdojo
+DD_DATABASE_ENGINE=django.db.backends.postgresql
+DD_DATABASE_HOST=postgres
+DD_DATABASE_PORT=5432
+
+DD_DATABASE_NAME=defectdojo
+DD_DATABASE_USER=defectdojo
+DD_DATABASE_PASSWORD=defectdojo
+
+DD_TEST_DATABASE_NAME=test_defectdojo
+DD_TEST_DATABASE_URL=postgresql://defectdojo:defectdojo@postgres:5432/test_defectdojo
+
+DD_CELERY_BROKER_URL=redis://redis:6379/0
+
+DD_DOCKERCOMPOSE_DATABASE=postgres
+DD_DOCKERCOMPOSE_BROKER=redis
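These env files pair with docker compose profiles of the same names, as exercised by `docker/setEnv.sh` later in this diff. A minimal sketch, assuming the compose file defines matching profiles:

```
docker compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env up -d
```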
diff --git a/docker/extra_settings/README.md b/docker/extra_settings/README.md
index e9e4df19efa..e919e1917bc 100644
--- a/docker/extra_settings/README.md
+++ b/docker/extra_settings/README.md
@@ -3,7 +3,7 @@ This folders is ignore by git and docker.
If a file if placed here, it will be copied on startup to `dojo/settings/local_settings.py`.
-For an example, see [template-local_settings](../dojo/settings/template-local_settings)
+For an example, see [template-local_settings](../../dojo/settings/template-local_settings)
Please note this copy action could fail if you have mounted the full `dojo/` folder, but that is owned by a different user/group.
That's why this copy action only happens in docker-compose release mode, and not in dev/debug/unit_tests/integration_tests modes.
diff --git a/docker/install_chrome_dependencies.py b/docker/install_chrome_dependencies.py
new file mode 100644
index 00000000000..5f4f714a430
--- /dev/null
+++ b/docker/install_chrome_dependencies.py
@@ -0,0 +1,60 @@
+"""
+This solution is largely based on the Playwright's browser dependencies script at
+https://github.com/microsoft/playwright/blob/main/utils/linux-browser-dependencies/inside_docker/list_dependencies.js
+"""
+
+import subprocess
+
+
+def find_packages(library_name):
+ stdout = run_command(["apt-file", "search", library_name])
+ if not stdout.strip():
+ return []
+ libs = [line.split(":")[0] for line in stdout.strip().split("\n")]
+ return list(set(libs))
+
+
+def run_command(cmd, cwd=None, env=None):
+ result = subprocess.run(cmd, cwd=cwd, env=env, capture_output=True, text=True)
+ return result.stdout
+
+
+def ldd(file_path):
+    # If ldd cannot be executed at all, report empty output and a non-zero code.
+    try:
+        result = subprocess.run(
+            ["ldd", file_path], capture_output=True, text=True
+        )
+        stdout = result.stdout
+        code = result.returncode
+    except OSError:
+        stdout = ""
+        code = 1
+ return stdout, code
+
+
+raw_deps = ldd("/opt/chrome/chrome")
+dependencies = raw_deps[0].splitlines()
+
+missing_deps = {
+ r[0].strip()
+ for d in dependencies
+ for r in [d.split("=>")]
+ if len(r) == 2 and r[1].strip() == "not found"
+}
+
+missing_packages = []
+for d in missing_deps:
+ all_packages = find_packages(d)
+ packages = [
+ p
+ for p in all_packages
+ if not any(
+ p.endswith(suffix) for suffix in ["-dbg", "-test", "tests", "-dev", "-mesa"]
+ )
+ ]
+ for p in packages:
+ missing_packages.append(p)
+
+print(" ".join(missing_packages))
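A sketch of how this script is presumably consumed during an image build; it assumes `apt-file` is installed with an up-to-date index, since `find_packages` shells out to `apt-file search`:

```
apt-file update
apt-get install -y --no-install-recommends $(python3 docker/install_chrome_dependencies.py)
```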
diff --git a/docker/secret-file-loader.sh b/docker/secret-file-loader.sh
new file mode 100644
index 00000000000..157b6512a40
--- /dev/null
+++ b/docker/secret-file-loader.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+# Convert all environment variables with names ending in _FILE into the content of
+# the file that they point at and use the name without the trailing _FILE.
+# This can be used to carry in Docker secrets.
+# Inspired by https://github.com/grafana/grafana-docker/pull/166
+# But rewrote for /bin/sh
+for VAR_NAME in $(env | grep '^DD_[^=]\+_FILE=.\+' | sed -r "s/([^=]*)_FILE=.*/\1/g"); do
+ VAR_NAME_FILE="$VAR_NAME"_FILE
+ if [ -n "$(eval echo "\$$VAR_NAME")" ]; then
+ echo >&2 "WARNING: Both $VAR_NAME and $VAR_NAME_FILE are set. Content of $VAR_NAME will be overridden."
+ fi
+ echo "Getting secret $VAR_NAME from $(eval echo "\$$VAR_NAME_FILE")"
+ export "$VAR_NAME"="$(cat "$(eval echo "\$$VAR_NAME_FILE")")"
+ unset "$VAR_NAME_FILE"
+done
\ No newline at end of file
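A usage sketch of the loader (the secret path is hypothetical): pointing a `DD_*_FILE` variable at a mounted Docker secret makes the loader populate the plain variable:

```
export DD_DATABASE_PASSWORD_FILE=/run/secrets/dd_db_password
. /secret-file-loader.sh
# DD_DATABASE_PASSWORD now holds the file's content; DD_DATABASE_PASSWORD_FILE is unset
```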
diff --git a/docker/setEnv.sh b/docker/setEnv.sh
index 8787a19b1b4..232998881ee 100755
--- a/docker/setEnv.sh
+++ b/docker/setEnv.sh
@@ -23,7 +23,7 @@ function show_current {
function get_current {
if [ -L ${override_link} ]
then
- # Check for Mac OSX
+ # Check for Mac OSX
if [[ "$OSTYPE" == "darwin"* ]]; then
# readlink is not native to mac, so this will work in it's place.
symlink=$(python3 -c "import os; print(os.path.realpath('docker-compose.override.yml'))")
@@ -31,7 +31,8 @@ function get_current {
# Maintain the cleaner way
symlink=$(readlink -f docker-compose.override.yml)
fi
- current_env=$(expr $(basename symlink) : "^docker-compose.override.\(.*\).yml$")
+ basename_symlink=$(basename "$symlink")
+ current_env=$(expr "$basename_symlink" : "^docker-compose.override.\(.*\).yml$")
else
current_env=release
fi
@@ -53,9 +54,9 @@ function set_release {
get_current
if [ "${current_env}" != release ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
# In release configuration there is no override file
rm ${override_link}
- docker-compose down
echo "Now using 'release' configuration."
else
echo "Already using 'release' configuration."
@@ -67,9 +68,9 @@ function set_dev {
get_current
if [ "${current_env}" != dev ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
rm -f ${override_link}
ln -s ${override_file_dev} ${override_link}
- docker-compose down
echo "Now using 'dev' configuration."
else
echo "Already using 'dev' configuration."
@@ -80,9 +81,9 @@ function set_debug {
get_current
if [ "${current_env}" != debug ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
rm -f ${override_link}
ln -s ${override_file_debug} ${override_link}
- docker-compose down
echo "Now using 'debug' configuration."
else
echo "Already using 'debug' configuration."
@@ -93,9 +94,9 @@ function set_unit_tests {
get_current
if [ "${current_env}" != unit_tests ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
rm -f ${override_link}
ln -s ${override_file_unit_tests} ${override_link}
- docker-compose down
echo "Now using 'unit_tests' configuration."
else
echo "Already using 'unit_tests' configuration."
@@ -106,9 +107,9 @@ function set_unit_tests_cicd {
get_current
if [ "${current_env}" != unit_tests_cicd ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
rm -f ${override_link}
ln -s ${override_file_unit_tests_cicd} ${override_link}
- docker-compose down
echo "Now using 'unit_tests_cicd' configuration."
else
echo "Already using 'unit_tests_cicd' configuration."
@@ -119,9 +120,9 @@ function set_integration_tests {
get_current
if [ "${current_env}" != integration_tests ]
then
+ docker compose --profile mysql-rabbitmq --profile postgres-redis --env-file ./docker/environments/mysql-rabbitmq.env down
rm -f ${override_link}
ln -s ${override_file_integration_tests} ${override_link}
- docker-compose down
echo "Now using 'integration_tests' configuration."
else
echo "Already using 'integration_tests' configuration."
@@ -129,9 +130,9 @@ function set_integration_tests {
}
# Change directory to allow working with relative paths.
-cd ${target_dir}
+cd "${target_dir}" || exit
-if [ ${#} -eq 1 ] && [[ 'dev debug unit_tests unit_tests_cicd integration_tests release' =~ "${1}" ]]
+if [ ${#} -eq 1 ] && [[ 'dev debug unit_tests unit_tests_cicd integration_tests release' =~ ${1} ]]
then
set_"${1}"
else
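Usage sketch, matching the argument check above:

```
docker/setEnv.sh dev      # symlink docker-compose.override.dev.yml into place
docker/setEnv.sh release  # remove the override symlink again
```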
diff --git a/docker/unit-tests.sh b/docker/unit-tests.sh
index ee38b21b567..fc8dad8c8c8 100755
--- a/docker/unit-tests.sh
+++ b/docker/unit-tests.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Run available unittests with a simple setup
-cd /app
+cd /app || exit
python manage.py makemigrations dojo
python manage.py migrate
python manage.py test unittests -v 2
diff --git a/docker/wait-for-it.sh b/docker/wait-for-it.sh
index fb4a080fdf5..2bd5f21bc4b 100755
--- a/docker/wait-for-it.sh
+++ b/docker/wait-for-it.sh
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
-cmdname=$(basename $0)
+cmdname=$(basename "$0")
echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
@@ -32,7 +32,7 @@ wait_for()
start_ts=$(date +%s)
while :
do
- (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1
+ (echo > "/dev/tcp/$HOST/$PORT") >/dev/null 2>&1
result=$?
if [[ $result -eq 0 ]]; then
end_ts=$(date +%s)
@@ -41,19 +41,19 @@ wait_for()
fi
sleep 1
done
- return $result
+ return "$result"
}
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $QUIET -eq 1 ]]; then
- timeout $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
+ timeout "$TIMEOUT" "$0" --quiet --child --host="$HOST" --port="$PORT" --timeout="$TIMEOUT" &
else
- timeout $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
+ timeout "$TIMEOUT" "$0" --child --host="$HOST" --port="$PORT" --timeout="$TIMEOUT" &
fi
PID=$!
- trap "kill -INT -$PID" INT
+ trap 'kill -INT -$PID' INT
wait $PID
RESULT=$?
if [[ $RESULT -ne 0 ]]; then
@@ -67,7 +67,7 @@ while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
- hostport=(${1//:/ })
+ IFS=":" read -r -a hostport <<< "$1"
HOST=${hostport[0]}
PORT=${hostport[1]}
shift 1
@@ -113,7 +113,7 @@ do
;;
--)
shift
- CLI="$@"
+ CLI=("$@")
break
;;
--help)
@@ -150,12 +150,12 @@ else
fi
fi
-if [[ $CLI != "" ]]; then
+if [[ ${CLI[0]} != "" ]]; then
if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then
echoerr "$cmdname: strict mode, refusing to execute subprocess"
exit $RESULT
fi
- exec $CLI
+    exec "${CLI[@]}"
else
exit $RESULT
fi
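For reference, a usage sketch of the script (host, port, and command hypothetical; flags follow the upstream wait-for-it conventions). The `--` form exercises the `CLI` array handling fixed above:

```
./docker/wait-for-it.sh postgres:5432 --timeout=60 --strict -- echo "postgres is up"
```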
diff --git a/docs/config.dev.toml b/docs/config.dev.toml
index 1d5d1370522..23b9cf30c52 100644
--- a/docs/config.dev.toml
+++ b/docs/config.dev.toml
@@ -104,13 +104,13 @@ section = ["HTML"]
# This menu appears only if you have at least one [params.versions] set.
version_menu = "Releases"
-# Flag used in the "version-banner" partial to decide whether to display a
+# Flag used in the "version-banner" partial to decide whether to display a
# banner on every page indicating that this is an archived version of the docs.
# Set this flag to "true" if you want to display the banner.
archived_version = false
# The version number for the version of the docs represented in this doc set.
-# Used in the "version-banner" partial to display a version number for the
+# Used in the "version-banner" partial to display a version number for the
# current doc set.
version = "0.0"
@@ -166,7 +166,7 @@ yes = 'Glad to hear it! Please tell us how we can improve.'
# Adds a reading time to the top of each doc.
-# If you want this feature, but occasionally need to remove the Reading time from a single page,
+# If you want this feature, but occasionally need to remove the Reading time from a single page,
# add "hide_readingtime: true" to the page's front matter
[params.ui.readingtime]
enable = false
diff --git a/docs/content/en/_index.md b/docs/content/en/_index.md
index 1c25c63bf12..4510d5554af 100644
--- a/docs/content/en/_index.md
+++ b/docs/content/en/_index.md
@@ -21,34 +21,33 @@ cascade:
### What is DefectDojo?
-DefectDojo is a security tool that automates application
-security vulnerability management. DefectDojo streamlines
-the application security testing process by offering features such as
-importing third party security findings, merging and de-duping,
-integration with Jira, templating, report generation and security
-metrics.
+DefectDojo is a DevSecOps platform. DefectDojo streamlines DevSecOps by serving as an aggregator and single pane of glass for your security tools. DefectDojo has smart features to enhance and tune the results from your security tools, including the ability to merge findings, remember false positives, and distill duplicates. DefectDojo also integrates with JIRA, provides metrics / reports, and can be used for traditional pen test management.
### What does DefectDojo do?
-While traceability and metrics are the ultimate end goal, DefectDojo is
-a bug tracker at its core. Taking advantage of DefectDojo\'s
+While automation and efficiency are the ultimate end goals, DefectDojo is
+at its core a bug tracker for vulnerabilities. Taking advantage of DefectDojo's
Product:Engagement model, enables traceability among multiple projects
-and test cycles, and allows for fine-grained reporting.
+/ test cycles, and allows for fine-grained reporting.
### How does DefectDojo work?
-1. [Getting started]({{< ref "/getting_started" >}}) will tell you how to install and configure DefectDojo.
-2. [Usage]({{< ref "/usage" >}}) shows how to use DefectDojo to manage vulnerabilities.
-3. A lot of [integrations]({{< ref "/integrations" >}}) help to fit DefectDojo in your environment.
-4. [Contributing]({{< ref "/contributing" >}}) gives insights how you can help to make DefectDojo even better.
+1. [Getting started]({{< ref "/getting_started" >}}) covers how to install and configure DefectDojo.
+2. [Usage]({{< ref "/usage" >}}) covers how to use DefectDojo to manage vulnerabilities.
+3. We support a large amount of [integrations]({{< ref "/integrations" >}}) to help fit DefectDojo in your DevSecOps program.
+
### Where to find DefectDojo?
-The code is open source, and [available on
+Proprietary editions that include [additional features](https://documentation.defectdojo.com/proprietary_plugins/) and support can be purchased through [defectdojo.com](https://www.defectdojo.com/).
+
+The open-source edition is [available on
GitHub](https://github.com/DefectDojo/django-DefectDojo).
-A running example is available on [the demo server](https://demo.defectdojo.org),
+A running example is available on [our demo server](https://demo.defectdojo.org),
using the credentials `admin` / `defectdojo@demo#appsec`. Note: The demo
server is refreshed regularly and provisioned with some sample data.
-You can also find videos of demos on [our YouTube channel](https://www.youtube.com/channel/UC3WVGA1vSO0IV-8cDxdqoPQ).
+Follow us on [LinkedIn](https://www.linkedin.com/company/33245534) for updates.
+To get in touch with us, please reach out to info@defectdojo.com
+
diff --git a/docs/content/en/contributing/branching-model.md b/docs/content/en/contributing/branching-model.md
index 3c1aa23ed32..9599ed90336 100644
--- a/docs/content/en/contributing/branching-model.md
+++ b/docs/content/en/contributing/branching-model.md
@@ -1,29 +1,71 @@
---
title: "Branching model"
-description: "How to create releases"
+description: "How we create releases"
draft: false
weight: 3
---
## Regular releases
-The DefectDojo team aims to release at least once a month, on the first Tuesday.
-Bugfix or security releases can come at any time.
+The DefectDojo team aims to maintain the following cadence:
-In doubt, GitHub Actions are the source of truth. The releases are semi-automated right now, with a DefectDojo maintainer proceeding with each major step in the release. The steps for a regular release are:
-1. Create the release branch from `dev` and prepare a PR against `master` ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-pr.yml))
+- Minor releases: at least once a month on the first Monday of the month.
+- Patch/Bugfix releases: every week on Monday.
+- Security releases: performed outside of our regular cadence, depending on severity.
+
+GitHub Actions are the source of truth. The releases are semi-automated. The steps for a regular release are:
+1. Create the release branch from `dev` or `bugfix` and prepare a PR against `master` ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-pr.yml))
--> A maintainer verifies and manually merges the PR
-2. Tag, issue draft release and docker build+push ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-tag-docker.yml))
+1. Tag, issue draft release and docker build+push ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-tag-docker.yml))
--> A maintainer massages the release-drafter notes and publishes the release
-3. A PR to merge `master` back to `dev` is created to re-align the branches ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-master-into-dev.yml))
+1. A PR to merge `master` back to `dev` and `bugfix` is created to re-align the branches ([details](https://github.com/DefectDojo/django-DefectDojo/blob/master/.github/workflows/new-release-master-into-dev.yml))
## Security releases
PRs that relate to security issues are done through [security advisories](https://github.com/DefectDojo/django-DefectDojo/security/advisories) which provide a way to work privately on code without prematurely disclosing vulnerabilities.
## Release and hotfix model
-![Schemas](../../images/branching_model.png)
+![Schemas](../../images/branching_model_v2.png)
Diagrams created with [plantUML](https://plantuml.com). Find a web-based editor for PlantUML at https://www.planttext.com.
## Documentation
-A `dev` version of the documentation built from the `dev` branch is available at [DefectDojo Documentation - dev branch](https://defectdojo.github.io/django-DefectDojo/dev/).
+A `dev` version of the documentation built from the `dev` branch is available at [DefectDojo Documentation - dev branch](https://documentation.defectdojo.com/dev/).
+
+
+
+
+```
+@startuml
+
+participant "Dev Branch" as dev #LightBlue
+participant "BugFix Branch" as bugfix #LightGreen
+participant "Release Branch" as release #LightGoldenRodYellow
+participant "Master Branch" as master #LightSalmon
+
+== Minor Release (Monthly) ==
+
+dev -> release: Create branch "release/2.x.0"
+release -> master: Merge
+note right: Official Release\n - Tag 2.x.0\n - Push 2.x.0 to DockerHub
+master --> bugfix: Merge master into bugfix to realign
+master --> dev: Merge master back into dev
+
+== Patch/BugFix Release (Weekly) ==
+
+bugfix -> release: Create branch "release/2.x.y"
+release -> master: Merge
+note right: Official Release\n - Tag 2.x.y\n - Push 2.x.y to DockerHub
+master -> bugfix: Merge master back into bugfix to realign
+master --> dev: Merge master into dev to realign
+
+== Security Release (As Needed) ==
+
+master -> release: Create branch "release/2.x.y"
+release -> master: Merge
+note right: Official Release\n - Tag 2.x.y\n - Push 2.x.y to DockerHub
+master --> bugfix: Merge master into bugfix to realign
+master --> dev: Merge master into dev to realign
+
+@enduml
+```
+
diff --git a/docs/content/en/contributing/documentation.md b/docs/content/en/contributing/documentation.md
index faca05c54e9..95313d28344 100644
--- a/docs/content/en/contributing/documentation.md
+++ b/docs/content/en/contributing/documentation.md
@@ -5,8 +5,7 @@ draft: false
weight: 2
---
-The documentation is build with [Hugo](https://gohugo.io/) and uses the theme [Docsy](https://www.docsy.dev). The source code
-of the documentation is located in the [docs](https://github.com/DefectDojo/django-DefectDojo/tree/dev/doc) folder.
+The documentation is built with [Hugo](https://gohugo.io/) and uses the theme [Docsy](https://www.docsy.dev).
Static files for the webside are build with github actions and are publish in the gh-pages branch.
## How to run a local preview
@@ -15,7 +14,7 @@ Static files for the webside are build with github actions and are publish in th
2. Install JavaScript packages
- To build or update your site’s CSS resources, you also need PostCSS to create the final assets. If you need to install it, you must have a recent version of NodeJS installed on your machine so you can use npm, the Node package manager. By default npm installs tools under the directory where you run npm install:
+ To build or update your site’s CSS resources, you also need PostCSS to create the final assets. If you need to install it, you must have a recent version of NodeJS installed on your machine so you can use npm, the Node package manager. By default, npm installs tools under the directory where you run npm install:
{{< highlight bash >}}
cd docs
diff --git a/docs/content/en/contributing/how-to-write-a-parser.md b/docs/content/en/contributing/how-to-write-a-parser.md
index 1e9a89414ca..71141fcd1b0 100644
--- a/docs/content/en/contributing/how-to-write-a-parser.md
+++ b/docs/content/en/contributing/how-to-write-a-parser.md
@@ -13,10 +13,10 @@ All commands assume that you're located at the root of the django-DefectDojo clo
- You have forked https://github.com/DefectDojo/django-DefectDojo and cloned locally.
- Checkout `dev` and make sure you're up to date with the latest changes.
-- It's advised that you create a dedicated branch for your development, such as `git checkout -b parser-name` yet that's up to you.
+- It's advised that you create a dedicated branch for your development, such as `git checkout -b parser-name`.
-It is probably easier to use the docker-compose stack (and benefit from the hot-reload capbility for uWSGI).
-Set up your environment to use the debug environment, such as:
+It is easiest to use the docker-compose deployment as it has hot-reload capability for uWSGI.
+Set up your environment to use the debug environment:
`$ docker/setEnv.sh debug`
@@ -24,7 +24,7 @@ Please have a look at [DOCKER.md](https://github.com/DefectDojo/django-DefectDoj
### Docker images
-You'd want to build your docker images locally, and eventually pass in your local user's `uid` to be able to write to the image (handy for database migration files). Assuming your user's `uid` is `1000`, then:
+You will want to build your docker images locally, and eventually pass in your local user's `uid` to be able to write to the image (handy for database migration files). Assuming your user's `uid` is `1000`, then:
{{< highlight bash >}}
$ docker-compose build --build-arg uid=1000
@@ -36,8 +36,10 @@ $ docker-compose build --build-arg uid=1000
|------- |--------
|`dojo/tools/<parser_dir>/__init__.py` | Empty file for class initialization
|`dojo/tools/<parser_dir>/parser.py` | The meat. This is where you write your actual parser. The class name must be the Python module name without underscores plus `Parser`. **Example:** When the name of the Python module is `dependency_check`, the class name shall be `DependencyCheckParser`
-|`dojo/unittests/scans/<parser_dir>/{many_vulns,no_vuln,one_vuln}.json` | Sample files containing meaningful data for unit tests. The minimal set.
+|`unittests/scans/<parser_dir>/{many_vulns,no_vuln,one_vuln}.json` | Sample files containing meaningful data for unit tests. The minimal set.
+|`unittests/tools/test_<parser_dir>_parser.py` | Unit tests of the parser.
|`dojo/settings/settings.dist.py` | If you want to use a modern hashcode based deduplication algorithm
+|`doc/content/en/integrations/parsers/file/<parser_file>.md` | Documentation, what kind of file format is required and how it should be obtained
## Factory contract
@@ -90,6 +92,10 @@ class MyToolParser(object):
```
+## API Parsers
+
+DefectDojo has a limited number of API parsers. While we won't remove these connectors, adding API connectors has been problematic and thus we cannot accept new API parsers / connectors from the community at this time for supportability reasons. To maintain a high quality API connector, it is necessary to have a license to the tool. Getting that license requires partnership with the author or vendor. We're close to announcing a new program to help address this and bring API connectors to DefectDojo.
+
## Template Generator
Use the [template](https://github.com/DefectDojo/cookiecutter-scanner-parser) parser to quickly generate the files required. To get started you will need to install [cookiecutter](https://github.com/cookiecutter/cookiecutter).
@@ -108,7 +114,7 @@ Read [more](https://github.com/DefectDojo/cookiecutter-scanner-parser) on the te
## Things to pay attention to
-Here is a list of advise that will make your parser future proof.
+Here is a list of considerations that will make the parser robust for both common cases and edge cases.
### Do not parse URLs by hand
@@ -156,7 +162,7 @@ Good example:
### Do not parse CVSS by hand (vector, score or severity)
-Data can have `CVSS` vectors or scores. Don't try to write your own CVSS score algorithm.
+Data can have `CVSS` vectors or scores. Don't write your own CVSS score algorithm.
For parser, we rely on module `cvss`.
It's easy to use and will make the parser aligned with the rest of the code.
@@ -219,7 +225,9 @@ Bad example (DIY):
## Deduplication algorithm
-By default a new parser uses the 'legacy' deduplication algorithm documented at https://defectdojo.github.io/django-DefectDojo/usage/features/#deduplication-algorithms
+By default a new parser uses the 'legacy' deduplication algorithm documented at https://documentation.defectdojo.com/usage/features/#deduplication-algorithms
+
+Please use a pre-defined deduplication algorithm where applicable.
## Unit tests
@@ -283,6 +291,12 @@ for finding in findings:
endpoint.clean()
```
+### Testing API Parsers
+
+Not only the parser but also the importer should be tested.
+The `patch` method from `unittest.mock` is usually useful for simulating API responses.
+It is highly recommended to use it.
+
## Other files that could be involved
### Change to the model
@@ -311,4 +325,12 @@ If you want to take a look at previous parsers that are now part of DefectDojo,
## Update the import page documentation
-Please update [`docs/content/en/integrations/parsers.md`] with the details of your new parser.
+Please add a new .md file in [`docs/content/en/integrations/parsers`] with the details of your new parser. Include the following content headings:
+
+* Acceptable File Type(s) - please include how to generate this type of file from the related tool, as some tools have multiple methods or require specific commands.
+* An example unit test block, if applicable.
+* A link to the relevant unit tests folder so that users can quickly navigate there from Documentation.
+* A link to the scanner itself - (e.g. GitHub or vendor link)
+
+Here is an example of a completed Parser documentation page: https://defectdojo.github.io/django-DefectDojo/integrations/parsers/file/awssecurityhub/
+
diff --git a/docs/content/en/contributing/parser-documentation-template.md b/docs/content/en/contributing/parser-documentation-template.md
new file mode 100644
index 00000000000..10048d07d4f
--- /dev/null
+++ b/docs/content/en/contributing/parser-documentation-template.md
@@ -0,0 +1,20 @@
+---
+title: "Parser Documentation Template"
+toc_hide: true
+weight: 1
+---
+
+Use this template as part of writing a new parser.
+
+* Copy this .md file and add it to docs/integrations/parsers/file in the GitHub repository
+* Update the title to match the name of your new parser
+* Fill out all sections listed below
+
+### File Types
+Specify all file types accepted by your parser. Include a process for creating the acceptable file from the related security tool.
+
+### Sample Scan Data / Unit Tests
+Add a link to the relevant unit tests or sample scan data folder in the GitHub repository.
+
+### Link To Tool
+A link to the scanner itself - (e.g. GitHub or appropriate vendor link)
diff --git a/docs/content/en/contributing/wishlist.md b/docs/content/en/contributing/wishlist.md
deleted file mode 100644
index 6bbbd274112..00000000000
--- a/docs/content/en/contributing/wishlist.md
+++ /dev/null
@@ -1,13 +0,0 @@
----
-title: "Wishlist"
-description: "Be careful what you wish for"
-draft: false
-weight: 2
----
-
-To manage expectations, we call this the wishlist. These are items we want to do, are discussing or pondering our minds:
-- New modern UI / SPA
-- New dashboarding / statistics
-- New search engine
-- Adopt a plugin framework to allow plugins for issue trackers, parsers, reports, etc
-- More flexible model
diff --git a/docs/content/en/getting_started/architecture.md b/docs/content/en/getting_started/architecture.md
index 61632a1e9d0..594bc65bc98 100644
--- a/docs/content/en/getting_started/architecture.md
+++ b/docs/content/en/getting_started/architecture.md
@@ -1,6 +1,6 @@
---
title: "Architecture"
-description: "DefectDojo consists of several components that work together closely."
+description: "The DefectDojo platform consists of several components that work together closely."
draft: false
weight: 1
---
@@ -15,17 +15,17 @@ images, JavaScript files or CSS files.
## uWSGI
[uWSGI](https://uwsgi-docs.readthedocs.io/en/latest/) is the application server
-that runs the DefectDojo application, written in Python/Django, to serve all
+that runs the DefectDojo platform, written in Python/Django, to serve all
dynamic content.
## Message Broker
The application server sends tasks to a [Message Broker](https://docs.celeryproject.org/en/stable/getting-started/brokers/index.html)
-for asynchronous execution. [RabbitMQ](https://www.rabbitmq.com/) is a well supported choice.
+for asynchronous execution. [RabbitMQ](https://www.rabbitmq.com/) is a well established choice.
## Celery Worker
-Tasks like deduplication or the Jira synchonization are performed asynchronously
+Tasks like deduplication or the JIRA synchronization are performed asynchronously
in the background by the [Celery](https://docs.celeryproject.org/en/stable/)
Worker.
@@ -37,11 +37,11 @@ Beat.
## Initializer
-The Initializer gets started during startup of DefectDojo to initialize the
-database and run database migrations after upgrades of DefectDojo. It shuts
+The Initializer sets up / maintains the
+database and syncs / runs migrations after version upgrades. It shuts
itself down after all tasks are performed.
## Database
-The Database stores all data of DefectDojo. Currently [MySQL](https://dev.mysql.com/)
-and [PostgreSQL](https://www.postgresql.org/) are supported. Please note the `django-watson` search engine require one or more MyISAM tables, so you cannot use Azure MySQL or Cloud SQL for MySQL. AWS RDS MySQL supports MyISAM tables, so it will work.
+The Database stores all the application data of DefectDojo. Currently [PostgreSQL](https://www.postgresql.org/) and [MySQL](https://dev.mysql.com/)
+are supported, with PostgreSQL being the recommended option. Please note the `django-watson` search engine requires one or more MyISAM tables, so you cannot use Azure MySQL or Cloud SQL for MySQL. AWS RDS MySQL supports MyISAM tables.
diff --git a/docs/content/en/getting_started/configuration.md b/docs/content/en/getting_started/configuration.md
index 162a0b5a1cc..402689a2164 100644
--- a/docs/content/en/getting_started/configuration.md
+++ b/docs/content/en/getting_started/configuration.md
@@ -7,11 +7,11 @@ weight: 3
## dojo/settings/settings.dist.py
-The main settings are all stored in [`dojo/settings/settings.dist.py`](https://github.com/DefectDojo/django-DefectDojo/blob/master/dojo/settings/settings.dist.py). It is great to use this file as a reference what can be configured, but it shouldn't be edited directly, because changes would be overridden when updating DefectDojo. There are several methods to change the default settings:
+The main settings are stored in [`dojo/settings/settings.dist.py`](https://github.com/DefectDojo/django-DefectDojo/blob/master/dojo/settings/settings.dist.py). It is great to use this file as a reference for what can be configured, but it shouldn't be edited directly, because changes will be overwritten when updating DefectDojo. There are several methods to change the default settings:
### Environment variables
-Most of these parameters can be set by environment variables.
+Most parameters can be set by environment variables.
When you deploy DefectDojo via **Docker Compose**, you can set environment variables in [`docker-compose.yml`](https://github.com/DefectDojo/django-DefectDojo/blob/master/docker-compose.yml). Be aware you have to set the variables for three services: `uwsgi`, `celerybeat` and `celeryworker`.
@@ -26,8 +26,9 @@ An example can be found in [`template_env`](https://github.com/DefectDojo/django
### local_settings.py (not with Kubernetes)
`local_settings.py` can contain more complex customizations such as adding MIDDLEWARE or INSTALLED_APP entries.
-This file is processed *after* settings.dist.py is processed, so you can modify settings delivered by Defect Dojo out of the box.
- The file must be located in the `dojo/settings` directory. Environment variables in this file must have no `DD_` prefix.
+This file is processed *after* settings.dist.py is processed, so you can modify settings delivered by DefectDojo out of the box.
+ The file must be located in the `dojo/settings` directory. Environment variables in this file must not have the `DD_` prefix.
+If the file is missing, feel free to create it. Do not edit `settings.dist.py` directly.
An example can be found in [`dojo/settings/template-local_settings`](https://github.com/DefectDojo/django-DefectDojo/blob/master/dojo/settings/template-local_settings).
diff --git a/docs/content/en/getting_started/demo.md b/docs/content/en/getting_started/demo.md
index a52ef8373a2..cb78419bca8 100644
--- a/docs/content/en/getting_started/demo.md
+++ b/docs/content/en/getting_started/demo.md
@@ -1,6 +1,6 @@
---
title: "Demo"
-description: "There is Defect Dojo demo site running the latest official released version"
+description: "There is a DefectDojo demo site running the latest officially released version"
draft: false
weight: 6
---
diff --git a/docs/content/en/getting_started/installation.md b/docs/content/en/getting_started/installation.md
index 39f9013ab8f..a127f36e492 100644
--- a/docs/content/en/getting_started/installation.md
+++ b/docs/content/en/getting_started/installation.md
@@ -5,19 +5,35 @@ draft: false
weight: 2
---
-## Docker Compose install (recommended)
+## **Recommended Options**
+---
+
+### Docker Compose
See instructions in [DOCKER.md]()
-## Kubernetes install
+### SaaS (Includes Support & Supports the Project)
+
+[SaaS link](https://www.defectdojo.com/pricing)
+
+### AWS AMI (Supports the Project)
+
+[Marketplace link](https://aws.amazon.com/marketplace/pp/prodview-m2a25gr67xbzk), and complete [walkthrough](https://www.10security.com/defectdojo-aws-launch-guide)
+
+---
+## **Options for the brave (not officially supported)**
+---
+### Kubernetes
See instructions in [KUBERNETES.md]()
-## Local install with godojo
+### Local install with godojo
See instructions in [README.md]()
in the godojo repository
+---
+
## Customizing of settings
See [Configuration](../configuration)
diff --git a/docs/content/en/getting_started/running-in-production.md b/docs/content/en/getting_started/running-in-production.md
index 32e5d324904..6da16d253b7 100644
--- a/docs/content/en/getting_started/running-in-production.md
+++ b/docs/content/en/getting_started/running-in-production.md
@@ -1,21 +1,23 @@
---
title: "Running in production"
-description: "Productive use of DefectDojo needs consideration of performance and backups."
+description: "For use in Production environments, performance tweaks and backups are recommended."
draft: false
weight: 4
---
-## Production with docker-compose
+## Production use with docker-compose
The docker-compose.yml file in this repository is fully functional to evaluate DefectDojo in your local environment.
Although Docker Compose is one of the supported installation methods to deploy a containerized DefectDojo in a production environment, the docker-compose.yml file is not intended for production use without first customizing it to your particular situation.
+See [Running with Docker Compose](https://github.com/DefectDojo/django-DefectDojo/blob/master/readme-docs/DOCKER.md) for more information on how to run DefectDojo with Docker Compose.
+
### Database performance and backup
-It is recommended to use a dedicated database server and not the preconfigured MySQL database. This will improve the performance of DefectDojo
+It is recommended to use a dedicated database server and not the preconfigured PostgreSQL database. This will improve the performance of DefectDojo.
-In both case, if you use a dedicated database server or if you should decide to use the preconfigured MySQL database, make sure to make regular backups of the data. For a dedicated database server follow the instructions that come with the database server. For the preconfigured MySQL you can use mysqldump, e.g. as described in [How to backup a Docker MySQL database](https://dev.to/grant_bartlett/how-to-backup-a-docker-mysql-database-3nd8).
+In both cases (dedicated DB or containerized), if you are self-hosting, it is recommended that you implement periodic backups of your data.
### Backup of Media files
@@ -28,21 +30,16 @@ Please read the paragraphs below about key processes tweaks.
{{% /alert %}}
-Having taken the database to run elsewhere, the minimum recommendation
-is:
+With a separate database, the minimum recommendations
+are:
- 2 vCPUs
- 8 GB of RAM
-- 2 GB of disk space (remember, your database is not here \-- so
- basically, what you have for your O/S should do). You could allocate
+- 10 GB of disk space (remember, your database is not here \-- so
+ what you have for your O/S should do). You could allocate
a different disk than your OS\'s for potential performance
improvements.
-### Key processes
-
-Per , it is
-now easy to somewhat improve the uWSGI and celery worker performance.
-
#### uWSGI
By default (except in `ptvsd` mode for debug purposes), uWSGI will
@@ -60,20 +57,17 @@ concurrent connections.
#### Celery worker
-By default, a single mono-process celery worker is spawned. This is fine
-until you start having many findings, and when async operations like
-deduplication start to kick in. Eventually, it will starve your
-resources and crawl to a halt, while operations continue to queue up.
+By default, a single mono-process celery worker is spawned. When storing a large number of findings, leveraging async functions (like deduplication), or both, it is important to adjust these parameters to prevent resource starvation.
-The following variables will help a lot, while keeping a single celery
-worker container.
+
+The following variables can be changed to increase worker performance, while keeping a single celery container.
- `DD_CELERY_WORKER_POOL_TYPE` will let you switch to `prefork`.
(default `solo`)
-As you\'ve enabled `prefork`, the following variables have
-to be used. The default are working fairly well, see the
-Dockerfile.django for in-file references.
+When you enable `prefork`, the variables below have
+to be used; the defaults work fairly well. See the
+Dockerfile.django-* files for in-file references and the sketch after this list.
- `DD_CELERY_WORKER_AUTOSCALE_MIN` defaults to 2.
- `DD_CELERY_WORKER_AUTOSCALE_MAX` defaults to 8.
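+
+As a sketch (the variable names and defaults come from this page; placing them in the celery worker container's environment, e.g. via a compose override, is an assumption about your setup):
+
+```bash
+# Switch the worker pool to prefork and bound the autoscaler
+export DD_CELERY_WORKER_POOL_TYPE=prefork
+export DD_CELERY_WORKER_AUTOSCALE_MIN=2
+export DD_CELERY_WORKER_AUTOSCALE_MAX=8
+```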
@@ -85,30 +79,22 @@ You can execute the following command to see the configuration:
`docker-compose exec celerybeat bash -c "celery -A dojo inspect stats"`
and see what is in effect.
-###### Asynchronous Imports
+#### Asynchronous Import
-This is an experimental features that has some [concerns](https://github.com/DefectDojo/django-DefectDojo/pull/5553#issuecomment-989679555) that need to be addressed before it can be used reliably.
+**Please note: Asynchronous Import is currently an experimental feature. Exercise caution with this method, as results may be inconsistent.**
Import and Re-Import can also be configured to handle uploads asynchronously to aid in
-importing especially large files. It works by batching Findings and Endpoints by a
-configurable amount. Each batch will be be processed in seperate celery tasks.
+processing especially large scans. It works by batching Findings and Endpoints by a
+configurable amount. Each batch will be processed in a separate celery task.
-The following variables have to be used.
+The following variables impact async imports (see the sketch after this list).
- `DD_ASYNC_FINDING_IMPORT` defaults to False
-- `DD_ASYNC_FINDING_IMPORT_CHUNK_SIZE` deafults to 100
+- `DD_ASYNC_FINDING_IMPORT_CHUNK_SIZE` defaults to 100
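+
+A minimal sketch (variable names and defaults from this page; how you inject them, e.g. a compose override, depends on your setup):
+
+```bash
+# Enable asynchronous import and keep the documented default batch size
+export DD_ASYNC_FINDING_IMPORT=True
+export DD_ASYNC_FINDING_IMPORT_CHUNK_SIZE=100
+```
+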
When using asynchronous imports with dynamic scanners, Endpoints will continue to "trickle" in
-even after the import has returned a successful respsonse. This is becasue processing continues
+even after the import has returned a successful response. This is because processing continues
to occur after the Findings have already been imported.
To determine if an import has been fully completed, please see the progress bar in the appropriate test.
-## Monitoring
-
-To expose Django statistics for [Prometheus](https://prometheus.io/), set
-`DJANGO_METRICS_ENABLED` to `True` in the settings
-(see [Configuration](../configuration)).
-
-The Prometheus endpoint is than available under the path:
-`http://dd_server/django_metrics/metrics`
diff --git a/docs/content/en/getting_started/upgrading.md b/docs/content/en/getting_started/upgrading.md
deleted file mode 100644
index 9da2941bc75..00000000000
--- a/docs/content/en/getting_started/upgrading.md
+++ /dev/null
@@ -1,658 +0,0 @@
----
-title: "Upgrading"
-description: "Release specific upgrading instructions"
-draft: false
-weight: 5
----
-
-Docker-compose
---------------
-
-When you deploy a vanilla docker-compose, it will create a persistent
-volume for your MySQL database. As long as your volume is there, you
-should not lose any data.
-
-### Using docker images provided in DockerHub
-
-{{% alert title="Information" color="info" %}}
-If you\'re using `latest`, then you need to pre pull the `latest` from
-DockerHub to update.
-{{% /alert %}}
-
-
-The generic upgrade method for docker-compose follows these steps:
-
-- Pull the latest version
-
- ``` {.sourceCode .bash}
- docker pull defectdojo/defectdojo-django:latest
- docker pull defectdojo/defectdojo-nginx:latest
- ```
-
-- If you would like to use something older (so not the latest
- version), specify the version (tag) you want to upgrade to:
-
- ``` {.sourceCode .bash}
- docker pull defectdojo/defectdojo-django:1.10.2
- docker pull defectdojo/defectdojo-nginx:1.10.2
- ```
-
-- Go to the directory where your docker-compose.yml file lives
-- Stop DefectDojo: `docker-compose stop`
-- Re-start DefectDojo, allowing for container recreation:
- `docker-compose up -d`
-- Database migrations will be run automatically by the initializer.
- Check the output via `docker-compose logs initializer` or relevant k8s command
-- If you have the initializer disabled (or if you want to be on the
- safe side), run the migration command:
- `docker-compose exec uwsgi /bin/bash -c 'python manage.py migrate`
-
-### Building your local images
-
-If you build your images locally and do not use the ones from DockerHub,
-the instructions are much the same, except that you'd build your images
-first. (Of course, if you're doing this, then you know you have to
-update the source code first)
-
-Replace the first step above with this one: `docker-compose build`
-
-godojo installations
---------------------
-
-If you have installed DefectDojo on "iron" and wish to upgrade the installation, please see the [instructions in the repo](https://github.com/DefectDojo/godojo/blob/master/docs-and-scripts/upgrading.md).
-
-## Upgrading to DefectDojo Version 2.6.x.
-
-There are no special instruction for upgrading to 2.6.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.6.0) for the contents of the release.
-
-Please consult the security advisories [GHSA-f82x-m585-gj24](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-f82x-m585-gj24) (moderate) and [GHSA-v7fv-g69g-x7p2](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-v7fv-g69g-x7p2) (high) to see what security issues were fixed in this release. These will be published and become visible at January 18th, 2022.
-
-
-## Upgrading to DefectDojo Version 2.5.x.
-
-Legacy authorization has been completely removed with version 2.5.0. This includes removal of the migration of users
-to the new authorization as described in https://defectdojo.github.io/django-DefectDojo/getting_started/upgrading/#authorization.
-If you are still using the legacy authorization, you should run the migration with ``./manage.py migrate_authorization_v2``
-before upgrading to version 2.5.0
-
-This release introduces the "Forgot password" functionality (`DD_FORGOT_PASSWORD`: default `True`). The function
-allows sending an e-mail with the reset password link. Missing configuration or misconfiguration of SMTP
-(`DD_EMAIL_URL`) could raise an error (HTTP-500). Check and test (for example by resetting your own password) if you
-configured SMTP correctly. If you want to avoid HTTP-500 and you don't want to set up SMTP, you can just simply switch
-off the "Forgot password" functionality (`DD_FORGOT_PASSWORD=False`).
-
-Release renamed system setting `mail_notifications_from` to `email_from`. This value will not be used only for sending
-notifications but also for sending the reset password emails. It is highly recommended to check the content of this
-value if you are satisfied. If you installed DefectDojo earlier, you can expect `"from@example.com"` there. A fresh
-installation will use `"no-reply@example.com"`
-
-This release [updates](https://github.com/DefectDojo/django-DefectDojo/pull/5450) our helm dependencies. There is a breaking change if you are using the mysql database from the helm chart because we replaced the deprecated chart from the stable repo with a chart from bitnami. If you have persistance enabled, ensure to backup your data before upgrading. All data get lost when replacing the mysql chart during the upgrade. For data migration take a look at the mysql backup and restore process.
-
-Furthermore we updated our kubernetes version. Current tests run on 1.18.16 and 1.22.0.
-
-## Upgrading to DefectDojo Version 2.4.x. (Security Release)
-
-This releases fixes a High severity vulnerability for which the details will be disclosed on November 16th in [GHSA-fwg9-752c-qh8w](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-fwg9-752c-qh8w)
-
-There is a breaking change in the API for importing and re-importings scans with SonarQube API and Cobalt.io API. The [scan configurations
-have been unified](https://github.com/DefectDojo/django-DefectDojo/pull/5289) and are set now with the attribute `api_scan_configuration`.
-The existing configurations for SonarQube API and Cobalt.io API have been migrated.
-
-At the request of pyup.io, we had to remove the parser for Safety scans.
-
-
-## Upgrading to DefectDojo Version 2.3.x.
-
-There are no special instruction for upgrading to 2.3.0.
-In 2.3.0 we [changed the default password hashing algorithm to Argon2 (from PBKDF2)](https://github.com/DefectDojo/django-DefectDojo/pull/5205).
-When logging in, exising hashes get replaced by an Argon2 hash. If you want to rehash password without users having to login,
-please see the [Django password management docs](https://docs.djangoproject.com/en/3.2/topics/auth/passwords/).
-The previous password hashing algorithm (PBKDF2) was not unsafe, but we wanted to follow the [OWASP guidelines](https://cheatsheetseries.owasp.org/cheatsheets/Password_Storage_Cheat_Sheet.html).
-
-
-## Upgrading to DefectDojo Version 2.2.x.
-
-Upgrade to 2.0.0 contained migration of endpoints. Some parts of migration haven't been done properly. This deficiency
-may manifest as a doubled slash in endpoint URLs (like `http://foo.bar:8080//test`) or as a problem with deduplication
-of the same endpoints. The mentioned bug was fixed in 2.2.0 and if you have seen these kinds of problems, just rerun
-"Endpoint migration" as it is written in [Upgrading to DefectDojo Version 2.0.x.](#upgrading-to-defectdojo-version-20x).
-
-
-## Upgrading to DefectDojo Version 2.0.x.
-
-Follow the usual steps to upgrade as described above.
-
-BEFORE UPGRADING
-- If you are using SAML2 checkout the new [documentaion](https://defectdojo.github.io/django-DefectDojo/integrations/social-authentication/#saml-20) and update you settings following the migration section. We replaced [django-saml2-auth](https://github.com/fangli/django-saml2-auth) with [djangosaml2](https://github.com/IdentityPython/djangosaml2).
-
-AFTER UPGRADING
-- Usual migration process (`python manage.py migrate`) try to migrate all endpoints to new format and merge duplicates.
-- All broken endpoints (which weren't possible to migrate) have red flag 🚩 in standard list of endpoints.
-- Check if all your endpoints was migrated successfully, go to: https:///endpoint/migrate.
-- Alternatively, this can be run as management command: `docker-compose exec uwsgi ./manage.py endpoint_migration --dry-run`
-- When all endpoint will be fixed (there is not broken endpoint), press "Run migration" in https:///endpoint/migrate
-- Or, you can run management command: `docker-compose exec uwsgi ./manage.py endpoint_migration`
-- Details about endpoint migration / improvements in https://github.com/DefectDojo/django-DefectDojo/pull/4473
-
-We decided to name this version 2.0.0 because we did some big cleanups in this release:
-
-- Remove API v1 ([#4413](https://github.com/DefectDojo/django-DefectDojo/pull/4413))
-- Remove setup.bash installation method ([#4417](https://github.com/DefectDojo/django-DefectDojo/pull/4417))
-- Rename Finding.is_Mitigated field to Finding.is_mitigated ([#3854](https://github.com/DefectDojo/django-DefectDojo/pull/4854))
-- Remove everything related to the old tagging library ([#4419](https://github.com/DefectDojo/django-DefectDojo/pull/4419))
-- Remove S0/S1/S2../S5 severity display option ([#4415](https://github.com/DefectDojo/django-DefectDojo/pull/4415))
-- Refactor EndPoint handling/formatting ([#4473](https://github.com/DefectDojo/django-DefectDojo/pull/4473))
-- Upgrade to Django 3.x ([#3632](https://github.com/DefectDojo/django-DefectDojo/pull/3632))
-- PDF Reports removed ([#4418](https://github.com/DefectDojo/django-DefectDojo/pull/4418))
-- Hashcode calculation logic has changed. To update existing findings run:
-
- `./manage.py dedupe --hash_code_only`.
-
-If you're using docker:
-
-`docker-compose exec uwsgi ./manage.py dedupe --hash_code_only`.
-
-This can take a while depending on your instance size.
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.0.0
-
-### Endpoints
-
-- The usual migration process (`python manage.py migrate`) tries to migrate all endpoints to new format and merge duplicates.
-- All broken endpoints (which weren't possible to migrate) have a red flag 🚩 in the standard list of endpoints.
-- Check if all your endpoints were migrated successfully, go to: https:///endpoint/migrate.
-- Alternatively, this can be run as management command: `docker-compose exec uwsgi ./manage.py endpoint_migration --dry-run`
-- When all endpoint are fixed (there is not broken endpoint), press "Run migration" in https:///endpoint/migrate
-- Or, you can run management command: `docker-compose exec uwsgi ./manage.py endpoint_migration`
-- Details about endpoint migration / improvements in https://github.com/DefectDojo/django-DefectDojo/pull/4473
-
-### Authorization
-
-The new authorization system for Products and Product Types based on roles is the default now. The fields for authorized users are not available anymore, but you can assign roles as described in [Permissions](../../usage/permissions). Users are migrated automatically, so that their permissions are as close as possible to the previous authorization:
-- Superusers will still have all permissions on Products and Product Types, so they must not be changed.
-- Staff users have had all permissions for all product types and products, so they will be get a global role as *Owner*.
-- Product_Members and Product Type_Members will be added for authorized users according to the settings for the previous authorization:
- - The *Reader* role is set as the default.
- - If `AUTHORIZED_USERS_ALLOW_STAFF` is `True`, the user will get the *Owner* role for the respective Product or Product Type.
- - If `AUTHORIZED_USERS_ALLOW_CHANGE` or `AUTHORIZED_USERS_ALLOW_DELETE` is `True`, the user will get the *Writer* role for the respective Product or Product Type.
-
-The new authorization is active for both UI and API. Permissions set via authorized users or via the Django Admin interface are no longer taken into account.
-
-Please review the roles for your users after the upgrade to avoid an unintended permissions creep.
-
-
-## Upgrading to DefectDojo Version 1.15.x
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.15.0
-- If you have made changes to JIRA templates or the template config in the JIRA Project config for instances/products/engagements:
-The jira template settings introduced in 1.13 have been changed. You now have to select a subfolder instead of a sinlge template file. If you have chosen a non-default template here, you have to reapply that to all products / engagements. Also you have to move your custom templates into the correct subfolder in `dojo/templates/issue-trackers/`.
-- Hashcode calculation logic has changed in #4134, #4308 and #4310 to update existing findings run:
-
- `./manage.py dedupe --hash_code_only`
-
-If you're using docker:
-
-`docker-compose exec uwsgi ./manage.py dedupe --hash_code_only`
-
-This can take a while depending on your instance size.
-
-
-
-## Upgrading to DefectDojo Version 1.14.x
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.14.0
-
-Note that the below fields are now optional without default value. They will not be filled anymore with values such as "No references given" when found empty while saving the findings
-- mitigation
-- references
-- impact
-- url
-
-
-
-## Upgrading to DefectDojo Version 1.13.x
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.13.0
-- Hashcode settings affecting deduplication have changed, to update existing findings run:
-
- `./manage.py dedupe`
-
-If you're using docker:
-
- docker-compose exec uwsgi ./manage.py dedupe
-
-This can take a while depeneding on your instance size. It might possible that new duplicates are detected among existing findings, so make a backup before running!
-
-
-## Upgrading to DefectDojo Version 1.12.x
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.12.0
-- 1.12.1 is a security release https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.12.1
-
-## Upgrading to DefectDojo Version 1.11.x
-
-- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.11.0
-- 1.11.1 is a security release https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.11.1
-
-## Upgrading to DefectDojo Version 1.10.x
-
-**1.10.4 is a security release**
-
-- See the security advisory:
-
-- See release notes:
-
-- Version 1.10.4 replaces 1.10.3 as the latter contained an incomplete
- fix
-
-**What\'s New:**
-
-- See release notes:
-
-- DefectDojo now provides a `settings.py` file
- out-of-the-box. Custom settings need to go into
- `local\_settings.py`. See
-
- and
-
-- A quickfix is to rename your own / customized
- `settings.py` or `settings.dist.py` to
- `local\_settings.py`. Details of that PR:
-
-- Major JIRA integration refactoring, for which you should at least
- use 1.10.1 and not 1.10.0 for many bug fixes.
-
-**Breaking changes**
-
-Kubernetes/Helm users: we have moved away from the \"stable\" repository
-to \"bitnami\" in this release. The bitnami postgresql chart required us
-to add a new key to the postgresql secret, which will give you the error
-`postgresql-postgres-password is missing` if you have
-`createPostgresqlSecret: false`. In 1.10.1, a fix was also included to
-allow your existing `postgresqlPassword` to be reused properly.
-
-Including in 1.10.1 were a couple fixes related to a rabbitMQ upgrade.
-The path to access `password`, `erlangCookie` and
-`existingPasswordSecret` changed from `rabbitmq` to `auth`. Furthermore,
-as rabbitMQ is deployed as a StatefulSet, an in-place upgrade is not
-possible and an error will likely be thrown such as
-`Forbidden: updates to statefulset spec for fields other than 'replicas', 'template', and 'updateStrategy' are forbidden`.
-After ensuring your rabbitMQ celery queue is empty, you will then want
-to delete your rabbitMQ StatefulSet and PVC to allow them to get
-re-created, or fully delete and recreate defectdojo.
-
-## Upgrading to DefectDojo Version 1.9.3
-
-**This is a security release**
-
-- See the [security
- advisory](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-8q8j-7wc4-vjg5)
-- See [release
- notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.9.3)
-
-**What\'s New:**
-
-- See release notes:
-
-
-**NOTE:**
-
-When upgrading from before 1.9.2, a corrective script may need to be ran
-
-`./manage.py create\_endpoint\_status`
-
-If you\'re using docker:
-
-`docker-compose exec uwsgi ./manage.py create\_endpoint\_status`
-
-This can take a while depending on your hardware and the number of
-findings in your instance.
-
-- Search index tweaking index rebuild after upgrade:
-
-This requires a (one-time) rebuild of the Django-Watson search index.
-Execute the django command from the defect dojo installation directory:
-
-`./manage.py buildwatson]`
-
-If you\'re using docker:
-
-`docker-compose exec uwsgi ./manage.py buildwatson`
-
-This can take a while depending on your hardware and the number of
-findings in your instance.
-
-## Upgrading to DefectDojo Version 1.8.0
-
-**What\'s New:**
-
-- See release notes:
-
-- Improved search, which requires an index rebuild
- ()
-
-This requires a (one-time) rebuild of the Django-Watson search index.
-Execute the django command from the defect dojo installation directory:
-
-`./manage.py buildwatson`
-
-If you\'re using docker:
-
-`docker-compose exec uwsgi ./manage.py buildwatson`
-
-This can take a while depending on your hardware and the number of
-findings in your instance.
-
-- **NOTE:**
-
-As a result of a breaking bug revolving around Endpoint\_status objects,
-a corrective script will need to be ran after every dynamic scan
-imported through either API version.
-
-The script can be found
-[here](https://github.com/DefectDojo/django-DefectDojo/blob/dev/dojo/management/commands/create_endpoint_status.py)
-
-`./manage.py create\_endpoint\_status`
-
-If you\'re using docker:
-
-`docker-compose exec uwsgi ./manage.py create\_endpoint\_status`
-
-This can take a while depending on your hardware and the number of
-findings in your instance.
-
-## Upgrading to DefectDojo Version 1.7.0
-
-**What\'s New:**
-
-- Updated search, you can now search for CVE-XXXX-YYYY
-- Updated search index, fields added to index: \'id\', \'title\',
- \'cve\', \'url\', \'severity\', \'description\', \'mitigation\',
- \'impact\', \'steps\_to\_reproduce\', \'severity\_justification\',
- \'references\', \'sourcefilepath\', \'sourcefile\', \'hash\_code\',
- \'file\_path\', \'component\_name\', \'component\_version\',
- \'unique\_id\_from\_tool\'
-
-This requires a (one-time) rebuild of the Django-Watson search index.
-Execute the django command from the defect dojo installation directory:
-
-`./manage.py buildwatson dojo.Finding`
-
-If you\'re using docker:
-
-`docker-compose exec uwsgi ./manage.py buildwatson dojo.Finding`
-
-Upgrading to DefectDojo Version 1.5.0
--------------------------------------
-
-**What\'s New:**
-
-- Updated UI with a new DefectDojo logo, default colors and CSS.
-- Updated Product views with tabs for Product Overview, Metrics,
- Engagements, Endpoints, Benchmarks (ASVS), and Settings to make it
- easier to navigate and manage your products.
-- New Product Information fields: Regulations, Criticality, Platform,
- Lifecycle, Origin, User Records, Revenue, External Audience,
- Internet Accessible
-- Languages pie chart on product overview, only supported through the
- API and Django admin, integrates with cloc analyzer
-- New Engagement type of CI/CD to support continual testing
-- Engagement shortcuts and ability to import findings and auto-create
- an engagement
-- Engagement labels for overdue, no tests and findings
-- New Contextual menus throughout DefectDojo and shortcuts to new
- findings and critical findings
-- Ability to merge a finding into a parent finding and either
- inactivate or delete the merged findings.
-- Report improvements and styling adjustment with the default option
- of HTML reports
-- SLA for remediation of severities based on finding criticality, for
- example critical findings remediated within 7 days. Configurable in
- System Settings.
-- Engagement Auto-Close Days in System Settings. Automatically close
- an engagement if open past the end date.
-- Ability to apply remediation advice based on CWE. For example XSS
- can be configured as a template so that it\'s consistent across all
- findings. Enabled in system settings.
-- Finding confidence field supported from scanners. First
- implementation in the Burp importer.
-- Goast importer for static analysis of Golang products
-- Celery status check on System Settings
-- Beta rules framework release for modifying findings on the fly
-- DefectDojo 2.0 API with Swagger support
-- Created and Modified fields on all major tables
-- Various bug fixes reported on Github
-
-**Upgrading to 1.5.0 requirements:**
-
-1. Back up your database first, ideally take the backup from production
- and test the upgrade on a staging server.
-2. Edit the settings.py file which can be found in
- `django-DefectDojo/dojo/settings/settings.py`. Copy in the rest
- framework configuration after the CSRF\_COOKIE\_SECURE = True:
-
- REST_FRAMEWORK = {
- 'DEFAULT_AUTHENTICATION_CLASSES': (
- 'rest_framework.authentication.TokenAuthentication',
- 'rest_framework.authentication.BasicAuthentication',
- ),
- 'DEFAULT_PERMISSION_CLASSES': (
- 'rest_framework.permissions.DjangoModelPermissions',
- ),
- 'DEFAULT_RENDERER_CLASSES': (
- 'rest_framework.renderers.JSONRenderer',
- ),
- 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
- 'PAGE_SIZE': 25
- }
-
-Navigate to: LOGIN\_EXEMPT\_URLS and add the following after
-r\'\^%sfinding/image/(?P\\[\^/\]+)\$\' % URL\_PREFIX:
-
- r'^%sfinding/image/(?P[^/]+)$' % URL_PREFIX,
- r'^%sapi/v2/' % URL_PREFIX,
-
-Navigate to: INSTALLED\_APPS and add the following after:
-\'multiselectfield\',:
-
- 'multiselectfield',
- 'rest_framework',
- 'rest_framework.authtoken',
- 'rest_framework_swagger',
- 'dbbackup',
-
-Navigate to: CELERY\_TASK\_IGNORE\_RESULT = True and add the following
-after CELERY\_TASK\_IGNORE\_RESULT line:
-
- CELERY_RESULT_BACKEND = 'db+sqlite:///dojo.celeryresults.sqlite'
-
-Save your modified settings file. For reference the modified file should
-look like the new 1.5.0
-\[settings\]()
-file, minus the environmental configurations. As an alternative this
-file can be used and the enviromental configurations from you
-environment can be copied into this file.
-
-3. Activate your virtual environment and then upgrade the requirements:
-
-`pip install -r requirements.txt --upgrade`
-
-4. Upgrade the database:
-
- ./manage.py makemigrations
- ./manage.py migrate
-
-5. Collect the static files (Javascript, Images, CSS):
-
- ./manage.py collectstatic --noinput
-
-6. Complete
-
-## Upgrading to DefectDojo Version 1.3.1
-
-**What\'s New:**
-
-- New importers for Contrast, Nikto and TruffleHog (finding secrets in
- git repos).
-- Improved merging of findings for dynamic and static importers
-- Markdown support for findings
-- HTML report improvements including support of Markdown.
-- System settings Celery status page to assist in debugging if Celery
- is functional.
-
-**Upgrading to 1.3.1 requires:**
-
-1. pip install markdown pip install pandas
-2. ./manage.py makemigrations ./manage.py migrate
-3. ./manage.py collectstatic \--noinput
-4. Complete
-
-## Upgrading to DefectDojo Version 1.2.9
-
-**What\'s New:** New feature: Benchmarks (OWASP ASVS)
-
-**Upgrading to 1.2.9 requires:**
-
-1. ./manage.py makemigrations ./manage.py migrate ./manage.py loaddata
- dojo/fixtures/benchmark\_type.json ./manage.py loaddata
- dojo/fixtures/benchmark\_category.json ./manage.py loaddata
- dojo/fixtures/benchmark\_requirement.json
-2. ./manage.py collectstatic \--noinput
-3. Complete
-
-## Upgrading to DefectDojo Version 1.2.8
-
-New feature: Product Grading (Overall Product Health) Upgrading to 1.2.8
-requires:
-
-1. ./manage.py makemigrations ./manage.py migrate ./manage.py
- system\_settings
-2. ./manage.py collectstatic \--noinput
-3. pip install asteval
-4. pip install \--upgrade celery
-5. Complete
-
-## Upgrading to DefectDojo Version 1.2.4
-
-Upgrading to 1.2.4 requires:
-
-1. ./manage.py makemigrations ./manage.py migrate ./manage.py loaddata
- dojo/fixtures/objects\_review.json
-
-## Upgrading to DefectDojo Version 1.2.3
-
-Upgrading to 1.2.3 requires:
-
-1. ./manage.py makemigrations ./manage.py migrate ./manage.py loaddata
- dojo/fixtures/language\_type.json
-2. Currently languages and technologies can be updated via the API or
- in the admin section of Django.
-
-## July 6th 2017 - New location for system settings
-
-Pull request \#313 moves a number of system settings previously located
-in the application\'s settings.py to a model that can be used and
-changed within the web application under \"Configuration -\> System
-Settings\".
-
-If you\'re using a custom `URL_PREFIX` you will need to set this in the
-model after upgrading by editing `dojo/fixtures/system_settings.json`
-and setting your URL prefix in the `url_prefix` value there. Then issue
-the command `./manage.py loaddata system_settings.json` to load your
-settings into the database.
-
-If you\'re not using a custom `URL_PREFIX`, after upgrading simply go to
-the System Settings page and review which values you want to set for
-each setting, as they\'re not automatically migrated from settings.py.
-
-If you like you can then remove the following settings from settings.py
-to avoid confusion:
-
-- `ENABLE_DEDUPLICATION`
-- `ENABLE_JIRA`
-- `S_FINDING_SEVERITY_NAMING`
-- `URL_PREFIX`
-- `TIME_ZONE`
-- `TEAM_NAME`
-
-## Upgrading to DefectDojo Version 1.2.2
-
-Upgrading to 1.2.2 requires:
-
-1. Copying settings.py to the settings/ folder.
-2. If you have supervisor scripts change
- DJANGO\_SETTINGS\_MODULE=dojo.settings.settings
-
-## Upgrading to Django 1.1.5
-
-If you are upgrading an existing version of DefectDojo, you will need to
-run the following commands manually:
-
-1. First install Yarn. Follow the instructions based on your OS:
-
-2. The following must be removed/commented out from `settings.py`: :
-
- 'djangobower.finders.BowerFinder',
-
- From the line that contains:
- # where should bower install components
- ...
-
- To the end of the bower declarations
- 'justgage'
- )
-
-3. The following needs to be updated in `settings.py`: :
-
- STATICFILES_DIRS = (
- # Put strings here, like "/home/html/static" or "C:/www/django/static".
- # Always use forward slashes, even on Windows.
- # Don't forget to use absolute paths, not relative paths.
- os.path.dirname(DOJO_ROOT) + "/components/yarn_components",
- )
-
-## Upgrading to Django 1.11
-
-Pull request \#300 makes DefectDojo Django 1.11 ready. A fresh install
-of DefectDojo can be done with the setup.bash script included - no
-special steps are required.
-
-If you are upgrading an existing installation of DefectDojo, you will
-need to run the following commands manually: :
-
- pip install django-tastypie --upgrade
- pip install django-tastypie-swagger --upgrade
- pip install django-filter --upgrade
- pip install django-watson --upgrade
- pip install django-polymorphic --upgrade
- pip install django --upgrade
- pip install pillow --upgrade
- ./manage.py makemigrations
- ./manage.py migrate
-
-The following must be removed/commented out from settings.py: :
-
- TEMPLATE_DIRS
- TEMPLATE_DEBUG
- TEMPLATE_LOADERS
- TEMPLATE_CONTEXT_PROCESSORS
-
-The following needs to be added to settings.py: :
-
- TEMPLATES = [
- {
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'context_processors': [
- 'django.template.context_processors.debug',
- 'django.template.context_processors.request',
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
- ],
- },
- },
- ]
-
-Once all these steps are completed your installation of DefectDojo will
-be running under Django 1.11
diff --git a/docs/content/en/getting_started/upgrading/1.10.md b/docs/content/en/getting_started/upgrading/1.10.md
new file mode 100644
index 00000000000..540ec306aab
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.10.md
@@ -0,0 +1,50 @@
+---
+title: "Upgrading to DefectDojo Version 1.10.x"
+toc_hide: true
+weight: -20201124
+description: security release + breaking changes
+---
+**1.10.4 is a security release**
+
+- See the security advisory:
+
+- See release notes:
+
+- Version 1.10.4 replaces 1.10.3 as the latter contained an incomplete
+ fix
+
+**What\'s New:**
+
+- See release notes:
+
+- DefectDojo now provides a `settings.py` file
+ out-of-the-box. Custom settings need to go into
+ `local\_settings.py`. See
+
+ and
+
+- A quickfix is to rename your own / customized
+ `settings.py` or `settings.dist.py` to
+  `local_settings.py`. Details of that PR:
+
+- Major JIRA integration refactoring, for which you should at least
+ use 1.10.1 and not 1.10.0 for many bug fixes.
+
+**Breaking changes**
+
+Kubernetes/Helm users: we have moved away from the \"stable\" repository
+to \"bitnami\" in this release. The bitnami postgresql chart required us
+to add a new key to the postgresql secret, which will give you the error
+`postgresql-postgres-password is missing` if you have
+`createPostgresqlSecret: false`. In 1.10.1, a fix was also included to
+allow your existing `postgresqlPassword` to be reused properly.
+
+Included in 1.10.1 were a couple of fixes related to a rabbitMQ upgrade.
+The path to access `password`, `erlangCookie` and
+`existingPasswordSecret` changed from `rabbitmq` to `auth`. Furthermore,
+as rabbitMQ is deployed as a StatefulSet, an in-place upgrade is not
+possible and an error will likely be thrown such as
+`Forbidden: updates to statefulset spec for fields other than 'replicas', 'template', and 'updateStrategy' are forbidden`.
+After ensuring your rabbitMQ celery queue is empty, you will then want
+to delete your rabbitMQ StatefulSet and PVC to allow them to get
+re-created, or fully delete and recreate defectdojo.
diff --git a/docs/content/en/getting_started/upgrading/1.11.md b/docs/content/en/getting_started/upgrading/1.11.md
new file mode 100644
index 00000000000..9110d06f153
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.11.md
@@ -0,0 +1,8 @@
+---
+title: "Upgrading to DefectDojo Version 1.11.x"
+toc_hide: true
+weight: -20201229
+description: security release
+---
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.11.0
+- 1.11.1 is a security release https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.11.1
diff --git a/docs/content/en/getting_started/upgrading/1.12.md b/docs/content/en/getting_started/upgrading/1.12.md
new file mode 100644
index 00000000000..39c8371d91c
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.12.md
@@ -0,0 +1,8 @@
+---
+title: "Upgrading to DefectDojo Version 1.12.x"
+toc_hide: true
+weight: -20210126
+description: security release
+---
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.12.0
+- 1.12.1 is a security release https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.12.1
diff --git a/docs/content/en/getting_started/upgrading/1.13.md b/docs/content/en/getting_started/upgrading/1.13.md
new file mode 100644
index 00000000000..b5948a91a6f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.13.md
@@ -0,0 +1,17 @@
+---
+title: "Upgrading to DefectDojo Version 1.13.x"
+toc_hide: true
+weight: -20210223
+description: hashcode calculation logic has changed
+---
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.13.0
+- Hashcode settings affecting deduplication have changed; to update existing findings run:
+
+ `./manage.py dedupe`
+
+If you're using docker:
+
+ docker-compose exec uwsgi ./manage.py dedupe
+
+This can take a while depending on your instance size. It is possible that new duplicates are detected among existing findings, so make a backup before running!
+
diff --git a/docs/content/en/getting_started/upgrading/1.14.md b/docs/content/en/getting_started/upgrading/1.14.md
new file mode 100644
index 00000000000..4f7c72981e9
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.14.md
@@ -0,0 +1,15 @@
+---
+title: "Upgrading to DefectDojo Version 1.14.x"
+toc_hide: true
+weight: -20210330
+description: hashcode calculation logic has changed
+---
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.14.0
+
+Note that the fields below are now optional with no default value. They will no longer be filled with values such as "No references given" when found empty while saving findings:
+- mitigation
+- references
+- impact
+- url
+
+
diff --git a/docs/content/en/getting_started/upgrading/1.15.md b/docs/content/en/getting_started/upgrading/1.15.md
new file mode 100644
index 00000000000..4341ba51274
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.15.md
@@ -0,0 +1,20 @@
+---
+title: "Upgrading to DefectDojo Version 1.15.x"
+toc_hide: true
+weight: -20210500
+description: hashcode calculation logic has changed
+---
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.15.0
+- If you have made changes to JIRA templates or the template config in the JIRA Project config for instances/products/engagements:
+The jira template settings introduced in 1.13 have been changed. You now have to select a subfolder instead of a single template file. If you have chosen a non-default template here, you have to reapply that to all products / engagements. Also, you have to move your custom templates into the correct subfolder in `dojo/templates/issue-trackers/`.
+- Hashcode calculation logic has changed in #4134, #4308 and #4310. To update existing findings run:
+
+ `./manage.py dedupe --hash_code_only`
+
+If you're using docker:
+
+`docker-compose exec uwsgi ./manage.py dedupe --hash_code_only`
+
+This can take a while depending on your instance size.
+
+
diff --git a/docs/content/en/getting_started/upgrading/1.2.2.md b/docs/content/en/getting_started/upgrading/1.2.2.md
new file mode 100644
index 00000000000..e5366982819
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.2.2.md
@@ -0,0 +1,11 @@
+---
+title: "Upgrading to DefectDojo Version 1.2.2"
+toc_hide: true
+weight: -20200202
+description: multiple instructions
+---
+Upgrading to 1.2.2 requires:
+
+1. Copying settings.py to the settings/ folder.
+2. If you have supervisor scripts, change
+   `DJANGO_SETTINGS_MODULE=dojo.settings.settings`
diff --git a/docs/content/en/getting_started/upgrading/1.2.3.md b/docs/content/en/getting_started/upgrading/1.2.3.md
new file mode 100644
index 00000000000..ea2685271d2
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.2.3.md
@@ -0,0 +1,12 @@
+---
+title: "Upgrading to DefectDojo Version 1.2.3"
+toc_hide: true
+weight: -20200203
+description: multiple instructions
+---
+Upgrading to 1.2.3 requires:
+
+1. `./manage.py makemigrations`, `./manage.py migrate` and
+   `./manage.py loaddata dojo/fixtures/language_type.json`
+2. Currently languages and technologies can be updated via the API or
+ in the admin section of Django.
diff --git a/docs/content/en/getting_started/upgrading/1.2.4.md b/docs/content/en/getting_started/upgrading/1.2.4.md
new file mode 100644
index 00000000000..54ed3c196cb
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.2.4.md
@@ -0,0 +1,10 @@
+---
+title: "Upgrading to DefectDojo Version 1.2.4"
+toc_hide: true
+weight: -20200204
+description: multiple instructions
+---
+Upgrading to 1.2.4 requires:
+
+1. `./manage.py makemigrations`, `./manage.py migrate` and
+   `./manage.py loaddata dojo/fixtures/objects_review.json`
diff --git a/docs/content/en/getting_started/upgrading/1.2.8.md b/docs/content/en/getting_started/upgrading/1.2.8.md
new file mode 100644
index 00000000000..d8fd7029f65
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.2.8.md
@@ -0,0 +1,15 @@
+---
+title: "Upgrading to DefectDojo Version 1.2.8"
+toc_hide: true
+weight: -20200208
+description: multiple instructions
+---
+New feature: Product Grading (Overall Product Health). Upgrading to 1.2.8
+requires:
+
+1. `./manage.py makemigrations`, `./manage.py migrate` and
+   `./manage.py system_settings`
+2. `./manage.py collectstatic --noinput`
+3. `pip install asteval`
+4. `pip install --upgrade celery`
+5. Complete
diff --git a/docs/content/en/getting_started/upgrading/1.2.9.md b/docs/content/en/getting_started/upgrading/1.2.9.md
new file mode 100644
index 00000000000..ad798aa280b
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.2.9.md
@@ -0,0 +1,16 @@
+---
+title: "Upgrading to DefectDojo Version 1.2.9"
+toc_hide: true
+weight: -20200209
+description: multiple instructions
+---
+**What\'s New:** New feature: Benchmarks (OWASP ASVS)
+
+**Upgrading to 1.2.9 requires:**
+
+1. `./manage.py makemigrations`, `./manage.py migrate`, then
+   `./manage.py loaddata dojo/fixtures/benchmark_type.json`,
+   `./manage.py loaddata dojo/fixtures/benchmark_category.json` and
+   `./manage.py loaddata dojo/fixtures/benchmark_requirement.json`
+2. `./manage.py collectstatic --noinput`
+3. Complete
diff --git a/docs/content/en/getting_started/upgrading/1.3.1.md b/docs/content/en/getting_started/upgrading/1.3.1.md
new file mode 100644
index 00000000000..239463d664b
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.3.1.md
@@ -0,0 +1,22 @@
+---
+title: "Upgrading to DefectDojo Version 1.3.1"
+toc_hide: true
+weight: -20200301
+description: multiple instructions
+---
+**What\'s New:**
+
+- New importers for Contrast, Nikto and TruffleHog (finding secrets in
+ git repos).
+- Improved merging of findings for dynamic and static importers
+- Markdown support for findings
+- HTML report improvements including support of Markdown.
+- System settings Celery status page to assist in debugging if Celery
+ is functional.
+
+**Upgrading to 1.3.1 requires:**
+
+1. `pip install markdown` and `pip install pandas`
+2. `./manage.py makemigrations`, then `./manage.py migrate`
+3. `./manage.py collectstatic --noinput`
+4. Complete
diff --git a/docs/content/en/getting_started/upgrading/1.7.0.md b/docs/content/en/getting_started/upgrading/1.7.0.md
new file mode 100644
index 00000000000..4c70df94cef
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.7.0.md
@@ -0,0 +1,130 @@
+---
+title: "Upgrading to DefectDojo Version 1.7.0"
+toc_hide: true
+weight: -20200700
+description: multiple instructions
+---
+**What\'s New:**
+
+- Updated search: you can now search for CVE-XXXX-YYYY
+- Updated search index, fields added to index: 'id', 'title',
+  'cve', 'url', 'severity', 'description', 'mitigation',
+  'impact', 'steps_to_reproduce', 'severity_justification',
+  'references', 'sourcefilepath', 'sourcefile', 'hash_code',
+  'file_path', 'component_name', 'component_version',
+  'unique_id_from_tool'
+
+This requires a (one-time) rebuild of the Django-Watson search index.
+Execute the django command from the defect dojo installation directory:
+
+`./manage.py buildwatson dojo.Finding`
+
+If you\'re using docker:
+
+`docker-compose exec uwsgi ./manage.py buildwatson dojo.Finding`
+
+Upgrading to DefectDojo Version 1.5.0
+-------------------------------------
+
+**What\'s New:**
+
+- Updated UI with a new DefectDojo logo, default colors and CSS.
+- Updated Product views with tabs for Product Overview, Metrics,
+ Engagements, Endpoints, Benchmarks (ASVS), and Settings to make it
+ easier to navigate and manage your products.
+- New Product Information fields: Regulations, Criticality, Platform,
+ Lifecycle, Origin, User Records, Revenue, External Audience,
+ Internet Accessible
+- Languages pie chart on product overview, only supported through the
+ API and Django admin, integrates with cloc analyzer
+- New Engagement type of CI/CD to support continual testing
+- Engagement shortcuts and ability to import findings and auto-create
+ an engagement
+- Engagement labels for overdue, no tests and findings
+- New Contextual menus throughout DefectDojo and shortcuts to new
+ findings and critical findings
+- Ability to merge a finding into a parent finding and either
+ inactivate or delete the merged findings.
+- Report improvements and styling adjustment with the default option
+ of HTML reports
+- SLA for remediation of severities based on finding criticality, for
+ example critical findings remediated within 7 days. Configurable in
+ System Settings.
+- Engagement Auto-Close Days in System Settings. Automatically close
+ an engagement if open past the end date.
+- Ability to apply remediation advice based on CWE. For example XSS
+ can be configured as a template so that it\'s consistent across all
+ findings. Enabled in system settings.
+- Finding confidence field supported from scanners. First
+ implementation in the Burp importer.
+- Goast importer for static analysis of Golang products
+- Celery status check on System Settings
+- Beta rules framework release for modifying findings on the fly
+- DefectDojo 2.0 API with Swagger support
+- Created and Modified fields on all major tables
+- Various bug fixes reported on Github
+
+**Upgrading to 1.5.0 requirements:**
+
+1. Back up your database first, ideally take the backup from production
+ and test the upgrade on a staging server.
+2. Edit the settings.py file which can be found in
+   `django-DefectDojo/dojo/settings/settings.py`. Copy in the REST
+   framework configuration after `CSRF_COOKIE_SECURE = True`:
+
+ REST_FRAMEWORK = {
+ 'DEFAULT_AUTHENTICATION_CLASSES': (
+ 'rest_framework.authentication.TokenAuthentication',
+ 'rest_framework.authentication.BasicAuthentication',
+ ),
+ 'DEFAULT_PERMISSION_CLASSES': (
+ 'rest_framework.permissions.DjangoModelPermissions',
+ ),
+ 'DEFAULT_RENDERER_CLASSES': (
+ 'rest_framework.renderers.JSONRenderer',
+ ),
+ 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
+ 'PAGE_SIZE': 25
+ }
+
+Navigate to `LOGIN_EXEMPT_URLS` and add the following after the
+`r'^%sfinding/image/(?P<token>[^/]+)$' % URL_PREFIX` entry:
+
+    r'^%sfinding/image/(?P<token>[^/]+)$' % URL_PREFIX,
+    r'^%sapi/v2/' % URL_PREFIX,
+
+Navigate to `INSTALLED_APPS` and add the following after
+`'multiselectfield',`:
+
+ 'multiselectfield',
+ 'rest_framework',
+ 'rest_framework.authtoken',
+ 'rest_framework_swagger',
+ 'dbbackup',
+
+Navigate to `CELERY_TASK_IGNORE_RESULT = True` and add the following
+after that line:
+
+ CELERY_RESULT_BACKEND = 'db+sqlite:///dojo.celeryresults.sqlite'
+
+Save your modified settings file. For reference, the modified file should
+look like the new 1.5.0
+[settings]()
+file, minus the environmental configurations. As an alternative, this
+file can be used and the environmental configurations from your
+environment can be copied into this file.
+
+3. Activate your virtual environment and then upgrade the requirements:
+
+`pip install -r requirements.txt --upgrade`
+
+4. Upgrade the database:
+
+ ./manage.py makemigrations
+ ./manage.py migrate
+
+5. Collect the static files (Javascript, Images, CSS):
+
+ ./manage.py collectstatic --noinput
+
+6. Complete
diff --git a/docs/content/en/getting_started/upgrading/1.8.0.md b/docs/content/en/getting_started/upgrading/1.8.0.md
new file mode 100644
index 00000000000..150d72d7bec
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.8.0.md
@@ -0,0 +1,42 @@
+---
+title: "Upgrading to DefectDojo Version 1.8.0"
+toc_hide: true
+weight: -20200800
+description: fix buildwatson create_endpoint_status
+---
+**What\'s New:**
+
+- See release notes:
+
+- Improved search, which requires an index rebuild
+ ()
+
+This requires a (one-time) rebuild of the Django-Watson search index.
+Execute the django command from the defect dojo installation directory:
+
+`./manage.py buildwatson`
+
+If you\'re using docker:
+
+`docker-compose exec uwsgi ./manage.py buildwatson`
+
+This can take a while depending on your hardware and the number of
+findings in your instance.
+
+- **NOTE:**
+
+As a result of a breaking bug revolving around Endpoint\_status objects,
+a corrective script will need to be run after every dynamic scan
+imported through either API version.
+
+The script can be found
+[here](https://github.com/DefectDojo/django-DefectDojo/blob/dev/dojo/management/commands/create_endpoint_status.py)
+
+`./manage.py create_endpoint_status`
+
+If you\'re using docker:
+
+`docker-compose exec uwsgi ./manage.py create_endpoint_status`
+
+This can take a while depending on your hardware and the number of
+findings in your instance.
diff --git a/docs/content/en/getting_started/upgrading/1.9.3.md b/docs/content/en/getting_started/upgrading/1.9.3.md
new file mode 100644
index 00000000000..abbef40ab0f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/1.9.3.md
@@ -0,0 +1,44 @@
+---
+title: "Upgrading to DefectDojo Version 1.9.3"
+toc_hide: true
+weight: -20201115
+description: security release
+---
+**This is a security release**
+
+- See the [security
+ advisory](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-8q8j-7wc4-vjg5)
+- See [release
+ notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/1.9.3)
+
+**What\'s New:**
+
+- See release notes:
+
+
+**NOTE:**
+
+When upgrading from before 1.9.2, a corrective script may need to be run:
+
+`./manage.py create_endpoint_status`
+
+If you\'re using docker:
+
+`docker-compose exec uwsgi ./manage.py create_endpoint_status`
+
+This can take a while depending on your hardware and the number of
+findings in your instance.
+
+- Search index tweaking requires an index rebuild after the upgrade:
+
+This requires a (one-time) rebuild of the Django-Watson search index.
+Execute the django command from the defect dojo installation directory:
+
+`./manage.py buildwatson`
+
+If you\'re using docker:
+
+`docker-compose exec uwsgi ./manage.py buildwatson`
+
+This can take a while depending on your hardware and the number of
+findings in your instance.
diff --git a/docs/content/en/getting_started/upgrading/2.0.md b/docs/content/en/getting_started/upgrading/2.0.md
new file mode 100644
index 00000000000..2f45f98e63f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.0.md
@@ -0,0 +1,66 @@
+---
+title: "Upgrading to DefectDojo Version 2.0.x"
+toc_hide: true
+weight: -20210629
+description: breaking changes
+---
+Follow the usual steps to upgrade as described in the general Docker Compose upgrade instructions.
+
+BEFORE UPGRADING
+- If you are using SAML2, check out the new [documentation](https://documentation.defectdojo.com/integrations/social-authentication/#saml-20) and update your settings following the migration section. We replaced [django-saml2-auth](https://github.com/fangli/django-saml2-auth) with [djangosaml2](https://github.com/IdentityPython/djangosaml2).
+
+AFTER UPGRADING
+- The usual migration process (`python manage.py migrate`) tries to migrate all endpoints to the new format and merge duplicates.
+- All broken endpoints (which weren't possible to migrate) have a red flag 🚩 in the standard list of endpoints.
+- To check if all your endpoints were migrated successfully, go to: https:///endpoint/migrate.
+- Alternatively, this can be run as a management command: `docker-compose exec uwsgi ./manage.py endpoint_migration --dry-run`
+- When all endpoints are fixed (there are no broken endpoints), press "Run migration" in https:///endpoint/migrate
+- Or, you can run the management command: `docker-compose exec uwsgi ./manage.py endpoint_migration`
+- Details about endpoint migration / improvements in https://github.com/DefectDojo/django-DefectDojo/pull/4473
+
+We decided to name this version 2.0.0 because we did some big cleanups in this release:
+
+- Remove API v1 ([#4413](https://github.com/DefectDojo/django-DefectDojo/pull/4413))
+- Remove setup.bash installation method ([#4417](https://github.com/DefectDojo/django-DefectDojo/pull/4417))
+- Rename Finding.is_Mitigated field to Finding.is_mitigated ([#3854](https://github.com/DefectDojo/django-DefectDojo/pull/4854))
+- Remove everything related to the old tagging library ([#4419](https://github.com/DefectDojo/django-DefectDojo/pull/4419))
+- Remove S0/S1/S2../S5 severity display option ([#4415](https://github.com/DefectDojo/django-DefectDojo/pull/4415))
+- Refactor EndPoint handling/formatting ([#4473](https://github.com/DefectDojo/django-DefectDojo/pull/4473))
+- Upgrade to Django 3.x ([#3632](https://github.com/DefectDojo/django-DefectDojo/pull/3632))
+- PDF Reports removed ([#4418](https://github.com/DefectDojo/django-DefectDojo/pull/4418))
+- Hashcode calculation logic has changed. To update existing findings run:
+
+ `./manage.py dedupe --hash_code_only`.
+
+If you're using docker:
+
+`docker-compose exec uwsgi ./manage.py dedupe --hash_code_only`.
+
+This can take a while depending on your instance size.
+
+- See release notes: https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.0.0
+
+### Authorization
+
+The new authorization system for Products and Product Types based on roles is the default now. The fields for authorized users are not available anymore, but you can assign roles as described in [Permissions](../../usage/permissions). Users are migrated automatically, so that their permissions are as close as possible to the previous authorization:
+- Superusers will still have all permissions on Products and Product Types, so they must not be changed.
+- Staff users have had all permissions for all product types and products, so they will get the global role *Owner*.
+- Product_Members and Product Type_Members will be added for authorized users according to the settings for the previous authorization:
+ - The *Reader* role is set as the default.
+ - If `AUTHORIZED_USERS_ALLOW_STAFF` is `True`, the user will get the *Owner* role for the respective Product or Product Type.
+ - If `AUTHORIZED_USERS_ALLOW_CHANGE` or `AUTHORIZED_USERS_ALLOW_DELETE` is `True`, the user will get the *Writer* role for the respective Product or Product Type.
+
+The new authorization is active for both UI and API. Permissions set via authorized users or via the Django Admin interface are no longer taken into account.
+
+Please review the roles for your users after the upgrade to avoid an unintended permissions creep.
+
diff --git a/docs/content/en/getting_started/upgrading/2.10.md b/docs/content/en/getting_started/upgrading/2.10.md
new file mode 100644
index 00000000000..cb5a19774e1
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.10.md
@@ -0,0 +1,10 @@
+---
+title: "Upgrading to DefectDojo Version 2.10.x"
+toc_hide: true
+weight: -20220503
+description: breaking change
+---
+**Breaking change for Findings:** The field `cve` will be replaced by a list of Vulnerability Ids, which can store references to security advisories associated with this finding. These can be Common Vulnerabilities and Exposures (CVE) or from other sources, e.g. GitHub Security Advisories. Although the field does still exist in the code, the API and the UI have already been changed to use the list of Vulnerability Ids. Other areas like hash code calculation, search and parsers will be migrated step by step in later stages.
+
+This change also causes an API change for the endpoint `/engagements/{id}/accept_risks/`.
+
diff --git a/docs/content/en/getting_started/upgrading/2.12.md b/docs/content/en/getting_started/upgrading/2.12.md
new file mode 100644
index 00000000000..10bdec5369f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.12.md
@@ -0,0 +1,8 @@
+---
+title: "Upgrading to DefectDojo Version 2.12.x"
+toc_hide: true
+weight: -20220705
+description: breaking change
+---
+**Breaking change for search:** The field `cve` has been removed from the search index for Findings and the Vulnerability Ids have been added to the search index. With this, the syntax to search explicitly for vulnerability ids has been changed from `cve:` to `vulnerability_id:`, e.g. `vulnerability_id:CVE-2020-27619`.
+
diff --git a/docs/content/en/getting_started/upgrading/2.13.md b/docs/content/en/getting_started/upgrading/2.13.md
new file mode 100644
index 00000000000..24432b2475a
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.13.md
@@ -0,0 +1,37 @@
+---
+title: "Upgrading to DefectDojo Version 2.13.x"
+toc_hide: true
+weight: -20220802
+description: instructions for helm chart and others
+---
+The last release implemented the search for vulnerability ids, but the search database was not initialized. To populate the database table of the vulnerability ids, execute this django command from the defect dojo installation directory or from a shell of the Docker container or Kubernetes pod:
+
+`./manage.py migrate_cve`
+
+Additionally this requires a one-time rebuild of the Django-Watson search index. Execute this django command from the defect dojo installation directory or from a shell of the Docker container or Kubernetes pod:
+
+`./manage.py buildwatson`
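+
+If you're using docker, both commands can be run inside the uwsgi container, following the pattern used elsewhere in these notes (the `uwsgi` service name is an assumption based on the default compose file):
+
+```bash
+docker-compose exec uwsgi ./manage.py migrate_cve
+docker-compose exec uwsgi ./manage.py buildwatson
+```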
+
+**Upgrade instructions for helm chart with postgres enabled**: The postgres database uses a statefulset by default. Before upgrading the helm chart, we have to delete the statefulset and ensure that the PVC is reused to keep the data. For more information: https://docs.bitnami.com/kubernetes/infrastructure/postgresql/administration/upgrade/ .
+
+```bash
+helm repo update
+helm dependency update ./helm/defectdojo
+
+# obtain the name of the postgres pvc
+export POSTGRESQL_PVC=$(kubectl get pvc -l app.kubernetes.io/instance=defectdojo,role=primary -o jsonpath="{.items[0].metadata.name}")
+
+# delete postgres statefulset
+kubectl delete statefulsets.apps defectdojo-postgresql --namespace default --cascade=orphan
+
+# upgrade
+helm upgrade \
+ defectdojo \
+ ./helm/defectdojo/ \
+ --set primary.persistence.existingClaim=$POSTGRESQL_PVC \
+ ... # add your custom settings
+```
+
+**Further changes:**
+
+Legacy authorization for changing configurations based on staff users has been removed.
diff --git a/docs/content/en/getting_started/upgrading/2.15.md b/docs/content/en/getting_started/upgrading/2.15.md
new file mode 100644
index 00000000000..7423ae3c8d9
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.15.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.15.x"
+toc_hide: true
+weight: -20221004
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.15.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.15.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.16.md b/docs/content/en/getting_started/upgrading/2.16.md
new file mode 100644
index 00000000000..3432c7b7757
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.16.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.16.x"
+toc_hide: true
+weight: -20221102
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.16.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.16.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.17.md b/docs/content/en/getting_started/upgrading/2.17.md
new file mode 100644
index 00000000000..ef872a49a68
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.17.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.17.x"
+toc_hide: true
+weight: -20221206
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.17.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.17.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.18.md b/docs/content/en/getting_started/upgrading/2.18.md
new file mode 100644
index 00000000000..e6085819897
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.18.md
@@ -0,0 +1,23 @@
+---
+title: "Upgrading to DefectDojo Version 2.18.x"
+toc_hide: true
+weight: -20230103
+description: instructions for helm chart
+---
+**Upgrade instructions for helm chart with RabbitMQ enabled**: RabbitMQ uses a statefulset by default. Before upgrading the helm chart, we have to ensure that all queues are empty:
+
+```bash
+kubectl exec -i <rabbitmq-pod-name> -- rabbitmqctl list_queues
+```
+
+The next step is to delete the RabbitMQ PVC:
+
+```bash
+kubectl delete pvc -l app.kubernetes.io/name=rabbitmq
+```
+
+The last step is to perform the upgrade.
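+
+A minimal sketch of that upgrade, following the same pattern as the postgres instructions for 2.13 (add your own custom settings):
+
+```bash
+helm repo update
+helm dependency update ./helm/defectdojo
+helm upgrade defectdojo ./helm/defectdojo/
+```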
+
+For more information: https://artifacthub.io/packages/helm/bitnami/rabbitmq/11.2.0
+
+
diff --git a/docs/content/en/getting_started/upgrading/2.19.md b/docs/content/en/getting_started/upgrading/2.19.md
new file mode 100644
index 00000000000..0b1f2da320e
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.19.md
@@ -0,0 +1,13 @@
+---
+title: "Upgrading to DefectDojo Version 2.19.x"
+toc_hide: true
+weight: -20230206
+description: breaking change
+---
+There are new docker images based on Alpine with fewer third-party dependencies. Related to the new images, the existing docker files had to be renamed and now end in "-debian", while the new images end in "-alpine". Furthermore, there are new docker tags [DefectdojoVersion]-[OS], for example 2.19.0-alpine or 2.19.0-debian. The current tags (latest and [DefectdojoVersion]) are still based on the "old" Debian-based images. Be aware that the new Alpine images are not heavily tested and may contain bugs.
+
+**Breaking Change**
+
+In version 2.19.3, the GitHub OAuth integration has been removed to prevent configurations that may allow more access than intended.
+
+[DefectDojo Security Advisory: Severity Medium | Potential GitHub Authentication Misconfiguration](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-hfp4-q5pg-2p7r)
diff --git a/docs/content/en/getting_started/upgrading/2.2.md b/docs/content/en/getting_started/upgrading/2.2.md
new file mode 100644
index 00000000000..e81dd3f02f7
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.2.md
@@ -0,0 +1,11 @@
+---
+title: "Upgrading to DefectDojo Version 2.2.x"
+toc_hide: true
+weight: -20210831
+description: No special instructions.
+---
+The upgrade to 2.0.0 contained a migration of endpoints. Some parts of the migration were not done properly. This deficiency
+may manifest as a doubled slash in endpoint URLs (like `http://foo.bar:8080//test`) or as a problem with deduplication
+of the same endpoints. The mentioned bug was fixed in 2.2.0; if you have seen these kinds of problems, just rerun the
+"Endpoint migration" as described in [Upgrading to DefectDojo Version 2.0.x.](#upgrading-to-defectdojo-version-20x).
+
diff --git a/docs/content/en/getting_started/upgrading/2.20.md b/docs/content/en/getting_started/upgrading/2.20.md
new file mode 100644
index 00000000000..a2033f00b87
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.20.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.20.x"
+toc_hide: true
+weight: -20230306
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.20.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.20.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.21.md b/docs/content/en/getting_started/upgrading/2.21.md
new file mode 100644
index 00000000000..2e5726c8f99
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.21.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.21.x"
+toc_hide: true
+weight: -20230403
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.21.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.21.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.22.md b/docs/content/en/getting_started/upgrading/2.22.md
new file mode 100644
index 00000000000..1da6368422b
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.22.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.22.x"
+toc_hide: true
+weight: -20230501
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.22.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.22.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.23.md b/docs/content/en/getting_started/upgrading/2.23.md
new file mode 100644
index 00000000000..5ebcc4edc61
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.23.md
@@ -0,0 +1,21 @@
+---
+title: "Upgrading to DefectDojo Version 2.23.x"
+toc_hide: true
+weight: -20230605
+description: breaking change
+---
+There is a migration from the legacy Nessus and Nessus WAS parsers to a single Tenable parser. The updated Tenable parser simply merges existing support for Nessus and Nessus WAS without introducing new functionality that could create instability.
+
+There is a migration process built into the upgrade that will automatically convert existing Nessus and Nessus WAS findings and tests into Tenable findings and tests.
+
+**Breaking Change**
+
+ - If there is any use of the Nessus or Nessus WAS parsers in an automated fashion via the import and reimport API endpoints, the `scan-type` parameter needs to be updated to `Tenable Scan` (see the sketch after this list)
+ - The default containerized database will now be [PostgreSQL](https://www.postgresql.org/) rather than [MySQL](https://dev.mysql.com/) due to the use of case insensitivity on fields by default
+ - It is recommended to update the [database character set and collation](https://dev.mysql.com/doc/refman/5.7/en/charset-database.html) to use UTF encoding
+ - If your deployment uses the MySQL containerized database, please see the following updates to run DefectDojo:
+ - Use of the helper script "dc-up": `./dc-up.sh mysql-rabbitmq` or `./dc-up.sh mysql-redis`
+ - Use of the helper script "dc-up-d": `./dc-up-d.sh mysql-rabbitmq` or `./dc-up-d.sh mysql-redis`
+ - Use of Docker Compose directly: `docker-compose --profile mysql-rabbitmq --env-file ./docker/environments/mysql-rabbitmq.env up` or `docker-compose --profile mysql-redis --env-file ./docker/environments/mysql-redis.env up`
+
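+As a hedged sketch of the first point (hypothetical host, token and test id), an automated reimport would now send the new scan type:
+
+```bash
+curl -X POST "https://defectdojo.example.com/api/v2/reimport-scan/" \
+  -H "Authorization: Token <your-api-key>" \
+  -F "scan_type=Tenable Scan" \
+  -F "test=123" \
+  -F "file=@nessus_report.xml"
+```
+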
+For all other changes, check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.23.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.24.md b/docs/content/en/getting_started/upgrading/2.24.md
new file mode 100644
index 00000000000..b5948678ac9
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.24.md
@@ -0,0 +1,8 @@
+---
+title: "Upgrading to DefectDojo Version 2.24.x"
+toc_hide: true
+weight: -20230703
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.24.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.24.0) for the contents of the release.
+
diff --git a/docs/content/en/getting_started/upgrading/2.25.md b/docs/content/en/getting_started/upgrading/2.25.md
new file mode 100644
index 00000000000..43502f0f96f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.25.md
@@ -0,0 +1,31 @@
+---
+title: "Upgrading to DefectDojo Version 2.25.x"
+toc_hide: true
+weight: -20230807
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.25.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.25.0) for the contents of the release.
+
+A few query parameters used for filtering objects via the API by a product's tags have been renamed to be more consistent with the other "related object tags" (see the example after this list):
+
+**Breaking Change**
+
+ - Engagement
+ - `product__tags__name` -> `product__tags`
+ - `not_product__tags__name` -> `not_product__tags`
+ - Test
+ - `engagement__product__tags__name` -> `engagement__product__tags`
+ - `not_engagement__product__tags__name` -> `not_engagement__product__tags`
+ - Finding
+ - `test__engagement__product__tags__name` -> `test__engagement__product__tags`
+ - `not_test__engagement__product__tags__name` -> `not_test__engagement__product__tags`
+
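+As a hedged example (hypothetical host and token), a Finding filter that previously used `test__engagement__product__tags__name` now reads:
+
+```bash
+curl -H "Authorization: Token <your-api-key>" \
+  "https://defectdojo.example.com/api/v2/findings/?test__engagement__product__tags=production"
+```
+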
+**Deprecation**
+
+The OpenAPI 2.0 Swagger API documentation is being deprecated in favor of the existing
+OpenAPI 3.0 API documentation page. The OpenAPI 2.0 Swagger API documentation page is
+slated for removal in version 2.30.0.
+
+*Note*: The API has not changed in any way and behaves the same between OAPI2 and OAPI3.
+
+For all other changes, check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.25.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.26.md b/docs/content/en/getting_started/upgrading/2.26.md
new file mode 100644
index 00000000000..a89c77cbc22
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.26.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.26.x"
+toc_hide: true
+weight: -20230905
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.26.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.26.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.27.md b/docs/content/en/getting_started/upgrading/2.27.md
new file mode 100644
index 00000000000..581e02296f9
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.27.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.27.x"
+toc_hide: true
+weight: -20231002
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.27.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.27.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.28.md b/docs/content/en/getting_started/upgrading/2.28.md
new file mode 100644
index 00000000000..b456837cbb0
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.28.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.28.x"
+toc_hide: true
+weight: -20231106
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.28.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.28.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.29.md b/docs/content/en/getting_started/upgrading/2.29.md
new file mode 100644
index 00000000000..dfce7a5bbf6
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.29.md
@@ -0,0 +1,7 @@
+---
+title: "Upgrading to DefectDojo Version 2.29.x"
+toc_hide: true
+weight: -20231110
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.29.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.29.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.3.md b/docs/content/en/getting_started/upgrading/2.3.md
new file mode 100644
index 00000000000..73e8e0b25bf
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.3.md
@@ -0,0 +1,12 @@
+---
+title: "Upgrading to DefectDojo Version 2.3.x"
+toc_hide: true
+weight: -20211005
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.3.0.
+In 2.3.0 we [changed the default password hashing algorithm to Argon2 (from PBKDF2)](https://github.com/DefectDojo/django-DefectDojo/pull/5205).
+When logging in, existing hashes get replaced by an Argon2 hash. If you want to rehash passwords without users having to log in,
+please see the [Django password management docs](https://docs.djangoproject.com/en/3.2/topics/auth/passwords/).
+The previous password hashing algorithm (PBKDF2) was not unsafe, but we wanted to follow the [OWASP guidelines](https://cheatsheetseries.owasp.org/cheatsheets/Password_Storage_Cheat_Sheet.html).
+
diff --git a/docs/content/en/getting_started/upgrading/2.30.md b/docs/content/en/getting_started/upgrading/2.30.md
new file mode 100644
index 00000000000..6029febd302
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.30.md
@@ -0,0 +1,17 @@
+---
+title: 'Upgrading to DefectDojo Version 2.30.x'
+toc_hide: true
+weight: -20231211
+description: Breaking Change for Auditlog.
+---
+There are special instructions for upgrading to 2.30.0 if you previously disabled `enable_auditlog` (read below). Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.30.0) for the contents of the release.
+
+**Breaking Change**
+
+The parameter `enable_auditlog` can no longer be set through the System settings. If you set this parameter before, or if you need to change it to `False` (to disable audit logging), set the environment variable `DD_ENABLE_AUDITLOG` to `False`.
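+
+For deployments that are configured directly through environment variables, a minimal sketch (the value `False` here is the disabled state):
+
+```bash
+export DD_ENABLE_AUDITLOG=False
+```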
+
+If you are using docker-compose, this environment variable should be added to the `docker-compose.yml` file for every container run from the Django image. Adding the following line somewhere in the `environment` blocks of the `uwsgi`, `celerybeat`, `celeryworker`, and `init` containers should do the trick:
+```yaml
+DD_ENABLE_AUDITLOG: ${DD_ENABLE_AUDITLOG:-False}
+```
\ No newline at end of file
diff --git a/docs/content/en/getting_started/upgrading/2.31.md b/docs/content/en/getting_started/upgrading/2.31.md
new file mode 100644
index 00000000000..8bc174a227e
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.31.md
@@ -0,0 +1,18 @@
+---
+title: 'Upgrading to DefectDojo Version 2.31.x'
+toc_hide: true
+weight: -20240102
+description: breaking change
+---
+
+To continue maintaining the most up to date list of parsers, the following actions have been taken:
+
+- OpenVAS XML and OpenVAS CSV were merged into the OpenVAS Parser. There is a migration process built into the upgrade that will automatically convert existing OpenVAS XML and OpenVAS CSV findings into OpenVAS Parser findings.
+- Clair Scan and Clair Klar Scan were merged into Clair Scan. There is a migration process built into the upgrade that will automatically convert existing Clair Klar Scan findings to Clair Scan findings.
+- Whitesource has been renamed to Mend. There is a migration process built into the upgrade that will automatically convert existing Whitesource findings and tests into Mend findings and tests.
+
+**Breaking Change**
+
+ - If there is any use of the above-mentioned parsers in an automated fashion via the import and reimport API endpoints, the `scan-type` parameter needs to be updated accordingly.
+
+For all other changes, check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.31.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.32.md b/docs/content/en/getting_started/upgrading/2.32.md
new file mode 100644
index 00000000000..0d04c771e36
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.32.md
@@ -0,0 +1,7 @@
+---
+title: 'Upgrading to DefectDojo Version 2.32.x'
+toc_hide: true
+weight: -20240205
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.32.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.32.0) for the contents of the release.
diff --git a/docs/content/en/getting_started/upgrading/2.4.md b/docs/content/en/getting_started/upgrading/2.4.md
new file mode 100644
index 00000000000..36bfd7b1096
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.4.md
@@ -0,0 +1,14 @@
+---
+title: "Upgrading to DefectDojo Version 2.4.x (Security Release)"
+toc_hide: true
+weight: -20211102
+description: security Release
+---
+This release fixes a High severity vulnerability for which the details will be disclosed on November 16th in [GHSA-fwg9-752c-qh8w](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-fwg9-752c-qh8w).
+
+There is a breaking change in the API for importing and re-importing scans with SonarQube API and Cobalt.io API. The [scan configurations
+have been unified](https://github.com/DefectDojo/django-DefectDojo/pull/5289) and are now set with the attribute `api_scan_configuration`.
+The existing configurations for SonarQube API and Cobalt.io API have been migrated.
+
+At the request of pyup.io, we had to remove the parser for Safety scans.
+
diff --git a/docs/content/en/getting_started/upgrading/2.5.md b/docs/content/en/getting_started/upgrading/2.5.md
new file mode 100644
index 00000000000..7d45b8995c8
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.5.md
@@ -0,0 +1,25 @@
+---
+title: "Upgrading to DefectDojo Version 2.5.x"
+toc_hide: true
+weight: -20211208
+description: legacy authorization removed
+---
+Legacy authorization has been completely removed with version 2.5.0. This includes removal of the migration of users
+to the new authorization as described in https://documentation.defectdojo.com/getting_started/upgrading/#authorization.
+If you are still using the legacy authorization, you should run the migration with `./manage.py migrate_authorization_v2`
+before upgrading to version 2.5.0.
+
+This release introduces the "Forgot password" functionality (`DD_FORGOT_PASSWORD`: default `True`). The function
+allows sending an e-mail with the reset password link. Missing configuration or misconfiguration of SMTP
+(`DD_EMAIL_URL`) could raise an error (HTTP-500). Check and test (for example by resetting your own password) if you
+configured SMTP correctly. If you want to avoid HTTP-500 and you don't want to set up SMTP, you can just simply switch
+off the "Forgot password" functionality (`DD_FORGOT_PASSWORD=False`).
+
+This release renamed the system setting `mail_notifications_from` to `email_from`. This value is no longer used only for sending
+notifications but also for sending the reset password emails. It is highly recommended to check whether you are satisfied with
+the content of this value. If you installed DefectDojo earlier, you can expect `"from@example.com"` there. A fresh
+installation will use `"no-reply@example.com"`.
+
+This release [updates](https://github.com/DefectDojo/django-DefectDojo/pull/5450) our helm dependencies. There is a breaking change if you are using the mysql database from the helm chart, because we replaced the deprecated chart from the stable repo with a chart from bitnami. If you have persistence enabled, make sure to back up your data before upgrading. All data will be lost when replacing the mysql chart during the upgrade. For data migration, take a look at the mysql backup and restore process.
+
+Furthermore, we updated our Kubernetes version. Current tests run on 1.18.16 and 1.22.0.
diff --git a/docs/content/en/getting_started/upgrading/2.6.md b/docs/content/en/getting_started/upgrading/2.6.md
new file mode 100644
index 00000000000..22f4a38bb85
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.6.md
@@ -0,0 +1,9 @@
+---
+title: "Upgrading to DefectDojo Version 2.6.x"
+toc_hide: true
+weight: -20220104
+description: No special instructions.
+---
+There are no special instructions for upgrading to 2.6.0. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.6.0) for the contents of the release.
+
+Please consult the security advisories [GHSA-f82x-m585-gj24](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-f82x-m585-gj24) (moderate) and [GHSA-v7fv-g69g-x7p2](https://github.com/DefectDojo/django-DefectDojo/security/advisories/GHSA-v7fv-g69g-x7p2) (high) to see what security issues were fixed in this release. These will be published and become visible on January 18th, 2022.
diff --git a/docs/content/en/getting_started/upgrading/2.7.md b/docs/content/en/getting_started/upgrading/2.7.md
new file mode 100644
index 00000000000..672c3a77a0f
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.7.md
@@ -0,0 +1,19 @@
+---
+title: "Upgrading to DefectDojo Version 2.7.x"
+toc_hide: true
+weight: -20220201
+description: breaking change
+---
+This release is a breaking change regarding the Choctaw Hog parser. As the maintainers of this project unified multiple parsers under the Rusty Hog parser, we now support the parsing of Choctaw Hog JSON output files through the Rusty Hog parser. Furthermore, we also support Gottingen Hog and Essex Hog JSON output files with the Rusty Hog parser.
+
+There is another breaking change regarding the import of SSLyze scans. The parser has been renamed from `SSLyze 3 Scan (JSON)` to `SSLyze Scan (JSON)`. The data in the database is fixed by the initializer, but it may break scripted API calls.
+
+Release 2.7.0 contains a beta functionality to make permissions for the configuration of DefectDojo more flexible. When the settings parameter `FEATURE_CONFIGURATION_AUTHORIZATION` is set to `True`, many configuration dialogues and API endpoints can be enabled for users or groups of users, regardless of their **Superuser** or **Staff** status, see [Configuration Permissions]({{< ref "../../usage/permissions/#configuration-permissions" >}}).
+
+The functionality using the flag `AUTHORIZATION_STAFF_OVERRIDE` has been removed. The same result can be achieved by giving the staff users a global Owner role.
+
+To support the transition for these 2 changes, you can run a migration script with `./manage.py migrate_staff_users`. This script:
+
+* creates a group for all staff users,
+* sets all configuration permissions that staff users had and
+* sets the global Owner role, if `AUTHORIZATION_STAFF_OVERRIDE` is set to `True`.
diff --git a/docs/content/en/getting_started/upgrading/2.8.md b/docs/content/en/getting_started/upgrading/2.8.md
new file mode 100644
index 00000000000..06670847069
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.8.md
@@ -0,0 +1,15 @@
+---
+title: "Upgrading to DefectDojo Version 2.8.x"
+toc_hide: true
+weight: -20220301
+description: breaking changes
+---
+**Breaking change for Docker Compose:** Starting DefectDojo with Docker Compose now supports 2 databases (MySQL and PostgreSQL) and 2 celery brokers (RabbitMQ and Redis). To make this possible, docker-compose needs to be started with the parameters `--profile` and `--env-file`. You can get more information in [Setup via Docker Compose - Profiles](https://github.com/DefectDojo/django-DefectDojo/blob/master/readme-docs/DOCKER.md#setup-via-docker-compose---profiles). The profile `mysql-rabbitmq` provides the same configuration as in previous releases. With this the prerequisites have changed as well: Docker requires at least version 19.03.0 and Docker Compose 1.28.0.
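+
+For example, to reproduce the pre-2.8 configuration (the env file path is an assumption based on the current repository layout):
+
+```bash
+docker-compose --profile mysql-rabbitmq --env-file ./docker/environments/mysql-rabbitmq.env up
+```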
+
+**Breaking change for Helm Chart:** In one of the last releases we upgraded the redis dependency in our helm chart without renaming keys in our helm chart. We fixed this bug with this release, but you may want to check if all redis values are correct ([Pull Request](https://github.com/DefectDojo/django-DefectDojo/pull/5886)).
+
+The flexible permissions for the configuration of DefectDojo are now active by default. With this, the flag **Staff** for users is not relevant and not visible anymore. The old behaviour can still be activated by setting the parameter `FEATURE_CONFIGURATION_AUTHORIZATION` to `False`. If you haven't done so with the previous release, you can still run a migration script with `./manage.py migrate_staff_users`. This script:
+
+* creates a group for all staff users,
+* sets all configuration permissions that staff users had and
+* sets the global Owner role, if `AUTHORIZATION_STAFF_OVERRIDE` is set to `True`.
diff --git a/docs/content/en/getting_started/upgrading/2.9.md b/docs/content/en/getting_started/upgrading/2.9.md
new file mode 100644
index 00000000000..fb58a96a23b
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/2.9.md
@@ -0,0 +1,8 @@
+---
+title: "Upgrading to DefectDojo Version 2.9.x"
+toc_hide: true
+weight: -20220406
+description: breaking change for APIv2
+---
+**Breaking change for APIv2:** `configuration_url` was removed from API endpoint `/api/v2/tool_configurations/` due to redundancy.
+
diff --git a/docs/content/en/getting_started/upgrading/_index.md b/docs/content/en/getting_started/upgrading/_index.md
new file mode 100644
index 00000000000..5a9398a0d86
--- /dev/null
+++ b/docs/content/en/getting_started/upgrading/_index.md
@@ -0,0 +1,73 @@
+---
+title: "Upgrading"
+description: "Release specific upgrading instructions"
+draft: false
+weight: 5
+---
+
+## Docker-compose
+
+When you deploy a vanilla docker-compose, it will create a persistent
+volume for your MySQL database. As long as your volume is there, you
+should not lose any data.
+
+### Using docker images provided in DockerHub
+
+{{% alert title="Information" color="info" %}}
+If you're using `latest`, then you need to pre-pull the `latest` images from
+DockerHub to update.
+{{% /alert %}}
+
+
+The generic upgrade method for docker-compose is as follows:
+- Pull the latest version
+
+ ``` {.sourceCode .bash}
+ docker pull defectdojo/defectdojo-django:latest
+ docker pull defectdojo/defectdojo-nginx:latest
+ ```
+
+- If you would like to use a version other than the latest, specify the version (tag) you want to upgrade to:
+
+ ``` {.sourceCode .bash}
+ docker pull defectdojo/defectdojo-django:1.10.2
+ docker pull defectdojo/defectdojo-nginx:1.10.2
+ ```
+
+- If you would like to use alpine-based images, specify the version (tag) you want to upgrade to:
+
+ ``` {.sourceCode .bash}
+ docker pull defectdojo/defectdojo-django:1.10.2-alpine
+ docker pull defectdojo/defectdojo-nginx:1.10.2-alpine
+ ```
+
+- Go to the directory where your docker-compose.yml file lives
+- Stop DefectDojo: `./dc-stop.sh`
+- Re-start DefectDojo, allowing for container recreation:
+ `./dc-up-d.sh`
+- Database migrations will be run automatically by the initializer.
+ Check the output via `docker-compose logs initializer` or relevant k8s command
+- If you have the initializer disabled (or if you want to be on the
+ safe side), run the migration command:
+ `docker-compose exec uwsgi /bin/bash -c "python manage.py migrate"`
+
+### Building your local images
+
+If you build your images locally and do not use the ones from DockerHub,
+the instructions are the same, with the caveat that you must build your images
+first.
+- Pull the latest DefectDojo changes
+
+ ``` {.sourceCode .bash}
+ git fetch
+ git pull
+ git merge origin/master
+ ```
+
+Then replace the first step of the above generic upgrade method for docker-compose with: `docker-compose build`
+
+## godojo installations
+
+If you have installed DefectDojo on "iron" and wish to upgrade the installation, please see the [instructions in the repo](https://github.com/DefectDojo/godojo/blob/master/docs-and-scripts/upgrading.md).
+
+## Upgrade notes for each release
diff --git a/docs/content/en/integrations/api-v2-docs.md b/docs/content/en/integrations/api-v2-docs.md
index 72bd785de98..c64dfcc8919 100644
--- a/docs/content/en/integrations/api-v2-docs.md
+++ b/docs/content/en/integrations/api-v2-docs.md
@@ -12,7 +12,7 @@ DefectDojo\'s API is created using [Django Rest
Framework](http://www.django-rest-framework.org/). The documentation of
each endpoint is available within each DefectDojo installation at
[`/api/v2/doc/`](https://demo.defectdojo.org/api/v2/) and can be accessed by choosing the API v2
-Docs link on the user drop down menu in the header.
+Docs link on the user drop down menu in the header.
![image](../../images/api_v2_1.png)
@@ -20,10 +20,10 @@ The documentation is generated using [Django Rest Framework
Yet Another Swagger Generator](https://github.com/axnsan12/drf-yasg/), and is
interactive. On the top of API v2 docs is a link that generates an OpenAPI v2 spec.
-As a preparation to move to OpenAPIv3, we have added an compatible spec and documentation at [`/api/v2/oa3/swagger-ui/`](https://demo.defectdojo.org/api/v2/oa3/swagger-ui/?docExpansion=none)
+As a preparation to move to OpenAPIv3, we have added a compatible spec and documentation at [`/api/v2/oa3/swagger-ui/`](https://demo.defectdojo.org/api/v2/oa3/swagger-ui/)
To interact with the documentation, a valid Authorization header value
-is needed. Visit the `/api/v2/key/` view to generate your
+is needed. Visit the `/api/key-v2` view to generate your
API Key (`Token `) and copy the header value provided.
![image](../../images/api_v2_2.png)
@@ -35,8 +35,7 @@ URL, Response Body, Response Code and Response Headers.
If you're logged in to the Defect Dojo web UI, you do not need to provide the authorization token.
-Authentication
---------------
+## Authentication
The API uses header authentication with API key. The format of the
header should be: :
@@ -47,8 +46,12 @@ For example: :
Authorization: Token c8572a5adf107a693aa6c72584da31f4d1f1dcff
-Sample Code
------------
+### Alternative authentication method
+
+If you use [an alternative authentication method](../social-authentication/) for users, you may want to disable DefectDojo API tokens because it could bypass your authentication concept. \
+The use of DefectDojo API tokens can be disabled by setting the environment variable `DD_API_TOKENS_ENABLED` to `False`.
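+
+For example, a minimal sketch for a docker-compose style deployment (set the variable wherever your deployment defines its environment):
+
+{{< highlight bash >}}
+export DD_API_TOKENS_ENABLED=False
+{{< /highlight >}}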
+
+## Sample Code
Here are some simple python examples and their results produced against
the `/users` endpoint: :
@@ -61,10 +64,9 @@ headers = {'content-type': 'application/json',
'Authorization': 'Token c8572a5adf107a693aa6c72584da31f4d1f1dcff'}
r = requests.get(url, headers=headers, verify=True) # set verify to False if ssl cert is self-signed
-for key, value in r.__dict__.iteritems():
- print key
- print value
- print '------------------'
+for key, value in r.__dict__.items():
+ print(f"'{key}': '{value}'")
+ print('------------------')
{{< /highlight >}}
This code will return the list of all the users defined in DefectDojo.
@@ -101,10 +103,9 @@ headers = {'content-type': 'application/json',
'Authorization': 'Token c8572a5adf107a693aa6c72584da31f4d1f1dcff'}
r = requests.get(url, headers=headers, verify=True) # set verify to False if ssl cert is self-signed
-for key, value in r.__dict__.iteritems():
- print key
- print value
- print '------------------'
+for key, value in r.__dict__.items():
+ print(f"'{key}': '{value}'")
+ print('------------------')
{{< /highlight >}}
The json object result is: :
@@ -132,8 +133,7 @@ See [Django Rest Framework\'s documentation on interacting with an
API](http://www.django-rest-framework.org/topics/api-clients/) for
additional examples and tips.
-Manually calling the API
-------------------------
+## Manually calling the API
Tools like Postman can be used for testing the API.
@@ -172,8 +172,7 @@ Example for importing a scan result:
- Click send
-Clients / API Wrappers
-----------------------
+## Clients / API Wrappers
| Wrapper | Status | Notes |
| -----------------------------| ------------------------| ------------------------|
@@ -184,4 +183,4 @@ Clients / API Wrappers
| [.Net/C# library](https://www.nuget.org/packages/DefectDojo.Api/) | working (2021-06-08) | |
| [dd-import](https://github.com/MaibornWolff/dd-import) | working (2021-08-24) | dd-import is not directly an API wrapper. It offers some convenience functions to make it easier to import findings and language data from CI/CD pipelines. |
-Some of the api wrappers contain quite a bit of logic to ease scanning and importing in CI/CD environments. We are in the process of simplifying this by making the DefectDojo API smarter (so api wrappers / script can be dumber).
\ No newline at end of file
+Some of the api wrappers contain quite a bit of logic to ease scanning and importing in CI/CD environments. We are in the process of simplifying this by making the DefectDojo API smarter (so api wrappers / script can be dumber).
diff --git a/docs/content/en/integrations/burp-plugin.md b/docs/content/en/integrations/burp-plugin.md
index bcec96953e6..400b37c0f2a 100644
--- a/docs/content/en/integrations/burp-plugin.md
+++ b/docs/content/en/integrations/burp-plugin.md
@@ -5,6 +5,9 @@ draft: false
weight: 8
---
+**Please note: The DefectDojo Burp Plugin has been sunset and is no longer a supported feature.**
+
+Burp is still a supported tool, and all the results from it can be imported into DefectDojo. Burp can produce XML reports, and these can be uploaded to DefectDojo using the graphical user interface or the API. Our documentation at https://documentation.defectdojo.com/integrations/parsers/file/burp/ describes this usage.
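+
+As a hedged example of the API route (hypothetical host, token and engagement id; the `scan_type` value must match the Burp parser name as listed in your instance):
+
+```bash
+curl -X POST "https://defectdojo.example.com/api/v2/import-scan/" \
+  -H "Authorization: Token <your-api-key>" \
+  -F "scan_type=Burp Scan" \
+  -F "engagement=1" \
+  -F "file=@burp_report.xml"
+```
+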
This is Burp Plugin to export findings directly to DefectDojo.
diff --git a/docs/content/en/integrations/importing.md b/docs/content/en/integrations/importing.md
index 22110fe4925..20590ee1f71 100644
--- a/docs/content/en/integrations/importing.md
+++ b/docs/content/en/integrations/importing.md
@@ -1,6 +1,6 @@
---
title: "Importing"
-description: "DefectDojo has the ability to import scan reports from a large number of security tools."
+description: "How DefectDojo imports and reimports security tool reports."
draft: false
weight: 1
---
@@ -13,7 +13,7 @@ individual hosts vulnerable.
![Import Form](../../images/imp_1.png)
-This approach will create a new Test for each upload. This can result a lot of findings. If deduplication is enabled, new findings that are identical to existing findings get marked as a duplicate.
+This approach will create a new Test for each upload. This can result in a lot of findings. If deduplication is enabled, new Findings that are identical to existing Findings get marked as a duplicate.
## Reimport
@@ -36,8 +36,15 @@ The history of a test will be shown with the delta's for each reimported scan re
Clicking on a reimport changset will show the affected findings, as well as a status history per finding.
![Import History details](../../images/import_history_details1.png)
+### Triage-less scanners
+Some scanners might not include triage information in their reports (e.g. tfsec). They simply scan code or dependencies, flag issues, and return everything. You can perhaps remove some findings by adding comments in your code, but there is no simple way to filter findings out of the reports.
+
+That is why DefectDojo also includes a "Do not reactivate" checkbox when uploading reports (also in the reimport API), so you can persist the triages that have been done in DefectDojo without reactivating Findings on every upload.
+
+For context, see [#6892](https://github.com/DefectDojo/django-DefectDojo/issues/6892)
+
# API
-This section focuses on Import and Reimport via the API. Please see the [full documentation defails of all API Endpoints](../api-v2-docs/) for more details.
+This section focuses on Import and Reimport via the API. Please see the [full documentation details of all API Endpoints](../api-v2-docs/) for more details.
Reimport is actually the easiest way to get started as it will create any entities on the fly if needed and it will automatically detect if it is a first time upload or a re-upload.
## Import
@@ -52,13 +59,13 @@ An import can be performed by specifying the names of these entities in the API
{
"minimum_severity": 'Info',
"active": True,
- "verified": Trued,
+ "verified": True,
"scan_type": 'ZAP Scan',
"test_title": 'Manual ZAP Scan by John',
"product_type_name": 'Good Products',
"product_name": 'My little product',
"engagement_name": 'Important import',
- "auto_create_contex": True,
+ "auto_create_context": True,
}
```
@@ -70,7 +77,7 @@ A classic way of importing a scan is by specifying the ID of the engagement inst
{
"minimum_severity": 'Info',
"active": True,
- "verified": Trued,
+ "verified": True,
"scan_type": 'ZAP Scan',
"test_title": 'Manual ZAP Scan by John',
"engagement": 123,
@@ -81,28 +88,31 @@ A classic way of importing a scan is by specifying the ID of the engagement inst
## Reimport
ReImporting via the API is performed via the [reimport-scan](https://demo.defectdojo.org/api/v2/doc/) endpoint.
-An reimport can be performed by specifying the names of these entities in the API request:
+A reimport can be performed by specifying the names of these entities in the API request:
```JSON
{
"minimum_severity": 'Info',
"active": True,
- "verified": Trued,
+ "verified": True,
"scan_type": 'ZAP Scan',
"test_title": 'Manual ZAP Scan by John',
"product_type_name": 'Good Products',
"product_name": 'My little product',
"engagement_name": 'Important import',
- "auto_create_contex": True,
+ "auto_create_context": True,
+ "do_not_reactivate": False,
}
```
When `auto_create_context` is `True`, the product and engagement will be created if needed. Make sure your user has sufficient [permissions](../usage/permissions) to do this.
-A Reimport will automatically select the latest test inside the provided engagement that satisifes the provided `scan_type` and (optionally) provided `test_title`
+When `do_not_reactivate` is `True`, the importing/reimporting will ignore uploaded active findings and not reactivate previously closed findings, while still creating new findings if there are new ones. You will get a note on the finding to explain that it was not reactivated for that reason.
-If no existing Test is found, the reimport endpoint will use the import function to import the provided report into a new Test. This means a (CI/CD) script using the API doesn't need to know if a Test already exist, or if it is a first time upload for this product / engagement.
+A reimport will automatically select the latest test inside the provided engagement that satisfies the provided `scan_type` and (optionally) provided `test_title`.
+
+If no existing Test is found, the reimport endpoint will use the import function to import the provided report into a new Test. This means a (CI/CD) script using the API doesn't need to know if a Test already exists, or if it is a first time upload for this Product / Engagement.
A classic way of reimporting a scan is by specifying the ID of the test instead:
@@ -110,7 +120,7 @@ A classic way of reimporting a scan is by specifying the ID of the test instead:
{
"minimum_severity": 'Info',
"active": True,
- "verified": Trued,
+ "verified": True,
"scan_type": 'ZAP Scan',
"test": 123,
}
@@ -133,4 +143,4 @@ Here are the following use cases for using this field:
3. The report **does not** set the date, and the `scan_date` is **set** at import
- Finding date will be whatever the user set for `scan_date`
4. The report **sets** the date, and the `scan_date` is **set** at import
- - Finding date will be whatever the user set for `scan_date`
\ No newline at end of file
+ - Finding date will be whatever the user set for `scan_date`
diff --git a/docs/content/en/integrations/jira.md b/docs/content/en/integrations/jira.md
index f4c66c76634..e7a19329bd4 100644
--- a/docs/content/en/integrations/jira.md
+++ b/docs/content/en/integrations/jira.md
@@ -28,7 +28,8 @@ Enabling the Webhook
1. Visit \<**YOUR JIRA URL**\>/plugins/servlet/webhooks
2. Click \'Create a Webhook\'
3. For the field labeled \'URL\' enter: \<**YOUR DOJO
- DOMAIN**\>/webhook/
+ DOMAIN**\>/jira/webhook/<**YOUR GENERATED WEBHOOK SECRET**>
+ This value can be found under the Defect Dojo System settings.
4. Under \'Comments\' enable \'Created\'. Under Issue enable
\'Updated\'.
@@ -38,15 +39,21 @@ Configurations in Dojo
1. Navigate to the System Settings from the menu on the left side
or by directly visiting \/system\_settings.
2. Enable \'Enable JIRA integration\' and click submit.
+3. For the webhook created in Enabling the Webhook, enable
+ \'Enable JIRA web hook\' and click submit.
Adding JIRA to Dojo
-------------------
1. Click \'JIRA\' from the left hand menu.
2. Select \'Add Configuration\' from the drop-down.
-3. If you use Jira Cloud, you will need to generate an [API token
- for Jira](https://id.atlassian.com/manage/api-tokens) to use as
- the password
+3. For JIRA Server:
+
+ Enter the _Username_ & _Password_. A _Username_ and JIRA _Personal Access Token_ will not necessarily work.
+
+ For JIRA Cloud:
+
+ Enter _Email Address_ & [API token for Jira](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/)
4. To obtain the \'open status key\' and \'closed status key\'
visit \<**YOUR JIRA
URL**\>/rest/api/latest/issue/\<**ANY VALID ISSUE
@@ -66,7 +73,7 @@ To obtain \'epic name id\': If you have admin access to JIRA:
4. **Note**: dojojira uses the same celery functionality as
reports. Make sure the celery runner is setup correctly as
described:
-
+
Or
diff --git a/docs/content/en/integrations/ldap-authentication.md b/docs/content/en/integrations/ldap-authentication.md
new file mode 100644
index 00000000000..2fcf895e12b
--- /dev/null
+++ b/docs/content/en/integrations/ldap-authentication.md
@@ -0,0 +1,129 @@
+---
+title: "Authentication via LDAP"
+description: "Authenticate users using LDAP"
+draft: false
+weight: 4
+---
+
+## LDAP Authentication
+
+Out of the box Defect Dojo does not support LDAP authentication.
+
+*However*, since Defect Dojo is built using Django, it isn't too difficult to add support for LDAP.
+So long as you don't mind building your own Docker images...
+
+We will need to modify a grand total of 4-5 files, depending on how you want to pass Dojo your LDAP secrets.
+
+ - Dockerfile.django-*
+ - Dockerfile.nginx-*
+ - requirements.txt
+ - settings.dist.py
+ - docker-compose.yml *(Optional)*
+
+
+#### Dockerfile modifications
+
+In both Dockerfile.django and Dockerfile.nginx, you want to add the following lines to the apt-get install layers:
+
+```bash
+libldap2-dev \
+libsasl2-dev \
+ldap-utils \
+```
+
+
+#### requirements.txt
+
+Please check pypi.org for the latest versions of these requirements at the time of implementation and use those if you can.
+
+- [python-ldap](https://pypi.org/project/python-ldap/)
+- [django-auth-ldap](https://pypi.org/project/django-auth-ldap/)
+
+Otherwise add the following to requirements.txt:
+
+```
+python-ldap==3.4.2
+django-auth-ldap==4.1.0
+```
+
+
+#### settings.dist.py
+
+Find the settings file (hint: `/dojo/settings/settings.dist.py`) and add the following:
+
+At the top of the file:
+```python
+import ldap
+from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
+```
+
+Then further down add LDAP settings to the env dict:
+```python
+# LDAP
+DD_LDAP_SERVER_URI=(str, 'ldap://ldap.example.com'),
+DD_LDAP_BIND_DN=(str, ''),
+DD_LDAP_BIND_PASSWORD=(str, ''),
+```
+
+Then under the env dict add:
+```python
+AUTH_LDAP_SERVER_URI = env('DD_LDAP_SERVER_URI')
+AUTH_LDAP_BIND_DN = env('DD_LDAP_BIND_DN')
+AUTH_LDAP_BIND_PASSWORD = env('DD_LDAP_BIND_PASSWORD')
+AUTH_LDAP_USER_SEARCH = LDAPSearch(
+ "ou=Groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)"
+)
+
+AUTH_LDAP_USER_ATTR_MAP = {
+ "first_name": "givenName",
+ "last_name": "sn",
+ "email": "mail",
+}
+```
+Please make sure to customise all of the LDAP search variables to match your company's configuration.
+
+
+For additional group controls you can add:
+```python
+# Set up the basic group parameters.
+AUTH_LDAP_GROUP_SEARCH = LDAPSearch(
+ "dc=example,dc=com",
+ ldap.SCOPE_SUBTREE,
+ "(objectClass=groupOfNames)",
+)
+AUTH_LDAP_GROUP_TYPE = GroupOfNamesType(name_attr="cn")
+
+# Simple group restrictions
+AUTH_LDAP_REQUIRE_GROUP = "cn=DD_USER_ACTIVE,ou=Groups,dc=example,dc=com"
+
+AUTH_LDAP_USER_FLAGS_BY_GROUP = {
+ "is_active": "cn=DD_USER_ACTIVE,ou=Groups,dc=example,dc=com",
+ "is_staff": "cn=DD_USER_STAFF,ou=Groups,dc=example,dc=com",
+ "is_superuser": "cn=DD_USER_ADMIN,ou=Groups,dc=example,dc=com",
+}
+```
+
+Then also add `'django_auth_ldap.backend.LDAPBackend'` to the `AUTHENTICATION_BACKENDS` variable, for example:
+```python
+AUTHENTICATION_BACKENDS = (
+ 'django_auth_ldap.backend.LDAPBackend',
+ 'django.contrib.auth.backends.RemoteUserBackend',
+ 'django.contrib.auth.backends.ModelBackend',
+)
+```
+
+Read the docs for Django Authentication with LDAP here: https://django-auth-ldap.readthedocs.io/en/latest/
+
+#### docker-compose.yml
+
+In order to pass the variables to the settings.dist.py file via docker, it's a good idea to add these to the docker-compose file.
+
+You can do this by adding the following variables to the environment section for the uwsgi image:
+```
+DD_LDAP_SERVER_URI: "${DD_LDAP_SERVER_URI:-ldap://ldap.example.com}"
+DD_LDAP_BIND_DN: "${DD_LDAP_BIND_DN:-}"
+DD_LDAP_BIND_PASSWORD: "${DD_LDAP_BIND_PASSWORD:-}"
+```
+
+Alternatively you can set these values in a local_settings.py file.
+
diff --git a/docs/content/en/integrations/notifications.md b/docs/content/en/integrations/notifications.md
index 9011fde656f..648d8ff1bf8 100644
--- a/docs/content/en/integrations/notifications.md
+++ b/docs/content/en/integrations/notifications.md
@@ -16,9 +16,9 @@ and more.
The following notification methods currently exist:
- Email
- - Slack
+ - Slack
- Microsoft Teams
- - Alerts within DefectDojo
+ - Alerts within DefectDojo (default)
You can set these notifications on a global scope (if you have
administrator rights) or on a personal scope. For instance, an
@@ -27,31 +27,90 @@ to a certain Slack channel, whereas an individual user wants email
notifications to be sent to the user\'s specified email address when a
report has finished generating.
-Microsoft Teams does not provide an easy way to send messages to a personal
-channel. Therefore, DefectDojo can only send system scope notifications
-to Microsoft Teams.
+Users can define notifications on a product level as well, and these settings will be applied only for selected products.
In order to identify and notify you about things like upcoming
engagements, DefectDojo runs scheduled tasks for this purpose. These
tasks are scheduled and run using Celery beat, so this needs to run for
those notifications to work.
+DefectDojo allows a notification `template` to be used; administrators can use this feature to define which notifications newly created users should receive.
+
### Slack
+#### Basic Integration
+This method will allow DefectDojo to send Global notifications to a Slack channel. It can also send Personal notifications to an individual user's Slackbot.
+
+To configure Slack messaging, you will first need to create a new Slack app at https://api.slack.com/apps.
+
+This app can be created from scratch, or from a JSON manifest which includes all necessary scopes and bot functionality. This manifest can be copied and pasted into the Slack App wizard when you select 'Build From Manifest'.
+
+
+**JSON Manifest**
+
+~~~
+{
+ "_metadata": {
+ "major_version": 1,
+ "minor_version": 1
+ },
+ "display_information": {
+ "name": "DefectDojo",
+ "description": "Notifications from DefectDojo",
+ "background_color": "#0000AA"
+ },
+ "features": {
+ "bot_user": {
+ "display_name": "DefectDojo Notifications"
+ }
+ },
+ "oauth_config": {
+ "scopes": {
+ "bot": [
+ "chat:write",
+ "chat:write.customize",
+ "chat:write.public",
+ "incoming-webhook",
+ "users:read",
+ "users:read.email"
+ ]
+ },
+ "redirect_urls": [
+ "https://slack.com/oauth/v2/authorize"
+ ]
+ }
+}
+~~~
+
+
+
+Choose the channel where you want to post Global notifications during the 'Create From Manifest' process. Personal notifications will appear in a user's Slackbot if they have their Slack Email Address specified on their user profile.
+
#### Scopes
-The following scopes have to be granted.
+The following scopes have to be granted to your Slack App. If the App was created from the JSON Manifest above, these permission scopes will already be set correctly.
![Slack OAuth scopes](../../images/slack_scopes.png)
#### Token
-The bot token has to be chosen and put in your System Settings
+The Slack Bot Token needs to be pasted into the DefectDojo System Settings, nested underneath the 'Enable slack notifications' checkbox. This token can be found in the Features / OAuth & Permissions section of the Slack App settings.
![Slack token](../../images/slack_tokens.png)
+#### Examples of Slack notifications
+
+![Add Product](../../images/slack_add_product.png)
+
+![Import Scan](../../images/slack_import_scan.png)
+
+
### Microsoft Teams
+Microsoft Teams does not provide an easy way to send messages to a personal
+channel. Therefore, DefectDojo can only send system scope notifications
+to Microsoft Teams.
+
To activate notifications to Microsoft Teams, you have to:
- Configure an Incoming Webhook in a Teams channel and copy the URL of the webhook to the clipboard
- Activate `Enable Microsoft Teams notifications` in the System Settings
diff --git a/docs/content/en/integrations/parsers.md b/docs/content/en/integrations/parsers.md
deleted file mode 100644
index 71f54c2eedc..00000000000
--- a/docs/content/en/integrations/parsers.md
+++ /dev/null
@@ -1,1119 +0,0 @@
----
-title: "Supported reports"
-description: "DefectDojo has the ability to import scan reports from a large number of security tools."
-draft: false
-weight: 1
----
-
-## Security Tools
-
-### Acunetix Scanner
-
-XML format
-
-### Acunetix 360 Scanner
-
-Vulnerabilities List - JSON report
-
-### Anchore-Engine
-
-JSON vulnerability report generated by anchore-cli tool, using a command
-like `anchore-cli --json image vuln all`
-
-### Aqua
-
-JSON report format.
-
-### Anchore Grype
-
-Anchore Grype JSON report format generated with `-o json` option.
-
-{{< highlight bash >}}
-grype defectdojo/defectdojo-django:1.13.1 -o json > many_vulns.json
-{{< /highlight >}}
-
-### Arachni Scanner
-
-Arachni Web Scanner (http://arachni-scanner.com/wiki)
-
-Reports are generated with `arachni_reporter` tool this way:
-
-{{< highlight bash >}}
-arachni_reporter --reporter 'json' js.com.afr
-{{< /highlight >}}
-
-### AppSpider (Rapid7)
-
-Use the VulnerabilitiesSummary.xml file found in the zipped report
-download.
-
-### AuditJS (OSSIndex)
-
-AuditJS scanning tool using OSSIndex database and generated with `--json` or `-j` option ().
-
-{{< highlight bash >}}
-auditjs ossi --json > auditjs_report.json
-{{< /highlight >}}
-
-### AWS Security Hub
-
-The JSON output from AWS Security Hub exported with the `aws securityhub get-findings` ()
-command.
-
-### AWS Scout2 Scanner (deprecated)
-
-JS file in scout2-report/inc-awsconfig/aws\_config.js.
-
-{{% alert title="Warning" color="warning" %}}
-AWS Scout2 Scanner is deprecated and has been replaced with ScoutSuite (https://github.com/nccgroup/ScoutSuite) upstream.
-Please switch to the new parser for ScoutSuite.
-{{% /alert %}}
-
-{{% alert title="Warning" color="warning" %}}
-This parser is disactivated by default in releases >= 2.3.1 and will be removed in release >= 3.x.x.
-{{% /alert %}}
-
-### AWS Prowler Scanner
-
-Prowler file can be imported as a CSV (`-M csv`) or JSON (`-M json`) file.
-
-### Azure Security Center Recommendations Scan
-
-Azure Security Center recommendations can be exported from the user interface in CSV format.
-
-### Bandit
-
-JSON report format
-
-### Blackduck Hub
-
-2 options:
-
-* Import the zip file as can be created by Blackduck export.
-The zip file must contain the security.csv and files.csv in order to
-produce findings that bear file locations information.
-* Import a single security.csv file. Findings will not have any file location
-information.
-
-### Brakeman Scan
-
-Import Brakeman Scanner findings in JSON format.
-
-### Bugcrowd
-
-Import Bugcrowd results in CSV format.
-
-### Bundler-Audit
-
-Import the text output generated with bundle-audit check
-
-### Burp XML
-
-When the Burp report is generated, **the recommended option is Base64
-encoding both the request and response fields** - e.g. check the box
-that says \"Base64-encode requests and responses\". These fields will be
-processed and made available in the \'Finding View\' page.
-
-### Burp Enterprise Scan
-
-Import HTML reports from Burp Enterprise Edition
-
-### Burp GraphQL
-
-Import the JSON data returned from the BurpSuite Enterprise GraphQL API. Append all the
-issues returned to a list and save it as the value for the key "Issues". There is no need
-to filter duplicates, the parser will automatically combine issues with the same name.
-
-Example:
-
-{{< highlight json >}}
-{
- "Issues": [
- {
- "issue_type": {
- "name": "Cross-site scripting (reflected)",
- "description_html": "Issue Description",
- "remediation_html": "Issue Remediation",
- "vulnerability_classifications_html": "CWE-79: Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')",
- "references_html": "Cross-site scripting"
- },
- "description_html": "Details",
- "remediation_html": "Remediation Details",
- "severity": "high",
- "path": "/burp",
- "origin": "https://portswigger.net",
- "evidence": [
- {
- "request_index": 0,
- "request_segments": [
- {
- "data_html": "GET"
- },
- {
- "highlight_html": "data"
- },
- {
- "data_html": " HTTP More data"
- }
- ]
- },
- {
- "response_index": 0,
- "response_segments": [
- {
- "data_html": "HTTP/2 200 OK "
- },
- {
- "highlight_html": "data"
- },
- {
- "data_html": "More data"
- }
- ]
- }
- ]
- }
- ]
-}
-{{< /highlight >}}
-
-Example GraphQL query to get issue details:
-
-{{< highlight graphql >}}
- query Issue ($id: ID!, $serial_num: ID!) {
- issue(scan_id: $id, serial_number: $serial_num) {
- issue_type {
- name
- description_html
- remediation_html
- vulnerability_classifications_html
- references_html
- }
- description_html
- remediation_html
- severity
- path
- origin
- evidence {
- ... on Request {
- request_index
- request_segments {
- ... on DataSegment {
- data_html
- }
- ... on HighlightSegment {
- highlight_html
- }
- }
- }
- ... on Response {
- response_index
- response_segments {
- ... on DataSegment {
- data_html
- }
- ... on HighlightSegment {
- highlight_html
- }
- }
- }
- }
- }
- }
-{{< /highlight >}}
-
-
-### CargoAudit Scan
-
-Import JSON output of cargo-audit scan report
-
-### CCVS Report
-
-Import JSON reports from \[CCVS
-API\]()
-
-### Checkov Report
-
-Import JSON reports of Infrastructure as Code vulnerabilities.
-
-### Clair Scan
-
-Import JSON reports of Docker image vulnerabilities.
-
-### Clair Klar Scan
-
-Import JSON reports of Docker image vulnerabilities from clair klar
-client.
-
-### Cobalt.io Scan
-
-CSV Report
-
-### Cobalt.io API Import
-
-Import findings from the Cobalt.io API - no file required.
-
-Follow these steps to setup API importing:
-
-1. Configure the Cobalt.io Authentication details by navigating to
- Configuration / Tool Configuration, selecting the Tool Type to "Cobalt.io",
- and Authentication Type "API Key". Paste your Cobalt.io API key in the
- "API Key" field and the desired org token in the "Extras" field.
-2. In the Product settings select "Add API Scan Configuration" and select the
- previously added Cobalt.io Tool Configuration. Provide the ID
- of the asset from which to import findings in the field *Service key 1*.
- The ID can be found at the end of the URL when viewing the asset in your browser.
-3. After this is done, you can import the findings by selecting "Cobalt.io
- API Import" as the scan type. If you have more than one asset configured, you
- must also select which Cobalt.io API Scan Configuratio to use.
-
-### CodeQL
-CodeQL can be used to generate a SARIF report, that can be imported into Defect Dojo:
-
-`codeql database analyze db python-security-and-quality.qls --sarif-add-snippets --format=sarif-latest --output=security-extended.sarif`
-
-The same can be achieved by running the CodeQL GitHub action with the `add-snippet` property set to true.
-
-### Coverity API
-
-Export Coverity API view data in JSON format (`/api/viewContents/issues` endpoint).
-
-Currently these columns are mandatory:
- * `displayType` (`Type` in the UI)
- * `displayImpact` (`Impact` in the UI)
- * `status` (`Status` in the UI)
- * `firstDetected` (`First Detected` in the UI)
-
-Other supported attributes: `cwe`, `displayFile`, `occurrenceCount` and `firstDetected`
-
-### Crashtest Security
-
-Import JSON report, or XML report in JUnit format.
-
-### CredScan Report
-
-Import CSV credential scanner reports
-
-### Contrast Scanner
-
-CSV Report
-
-### Checkmarx
-
-- `Checkmarx Scan`, `Checkmarx Scan detailed`: XML report from Checkmarx SAST (source code analysis)
-- `Checkmarx OSA`: json report from Checkmarx Open Source Analysis (dependencies analysis)
-
-To generate the OSA report using Checkmarx CLI:
-`./runCxConsole.sh OsaScan -v -CxServer <...> -CxToken <..> -projectName <...> -enableOsa -OsaLocationPath <...> -OsaJson <...>`
-
-That will generate three files, two of which are needed for DefectDojo. Build the file for DefectDojo with the jq utility:
-`jq -s . CxOSAVulnerabilities.json CxOSALibraries.json`
-
-
-### Choctaw Hog parser
-
-Import the JSON output.
-
-### Cloudsploit (AquaSecurity)
-
-From: https://github.com/aquasecurity/cloudsploit . Import the JSON output.
-
-### CycloneDX
-
-CycloneDX is a lightweight software bill of materials (SBOM) standard designed for use in application security contexts and supply chain component analysis.
-
-From: https://www.cyclonedx.org/
-
-Example with Anchore Grype:
-
-{{< highlight bash >}}
-./grype defectdojo/defectdojo-django:1.13.1 -o cyclonedx > report.xml
-{{< /highlight >}}
-
-Example with `cyclonedx-bom` tool:
-
-{{< highlight bash >}}
-pip install cyclonedx-bom
-cyclonedx-py
-{{< /highlight >}}
-
-{{< highlight bash >}}
- Usage: cyclonedx-py [OPTIONS]
- Options:
- -i - the alternate filename to a frozen requirements.txt
- -o - the bom file to create
- -j - generate JSON instead of XML
-{{< /highlight >}}
-
-### DawnScanner
-
-Import report in JSON generated with -j option
-
-### Dependency Check
-
-OWASP Dependency Check output can be imported in Xml format. This parser ingests the vulnerable dependencies and inherits the suppressions.
-
-* Suppressed vulnerabilities are tagged with the tag: `suppressed`.
-* Suppressed vulnerabilities are marked as inactive, but not as mitigated.
-* If the suppression is missing a required tag, it tags them as `no_suppression_document`.
-* Related vulnerable dependencies are tagged with `related` tag.
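-
-For example, the XML report can be generated with the Dependency-Check CLI (a sketch; the project name and scan path are placeholders):
-
-{{< highlight bash >}}
-# scan ./src and write dependency-check-report.xml to the current directory
-dependency-check.sh --project "My App" --scan ./src --format XML --out .
-{{< /highlight >}}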
-
-### Dependency Track
-
-Dependency Track has implemented a DefectDojo integration. Information about
-how to configure the integration is documented here:
-https://docs.dependencytrack.org/integrations/defectdojo/
-
-Alternatively, the Finding Packaging Format (FPF) from OWASP Dependency Track can be
-imported in JSON format. See here for more info on this JSON format:
-
-
-### DrHeader
-
-Import the JSON report.
-
-
-### Dockle Report
-
-Import JSON container image linter reports
-
-
-### Detect-secrets
-
-Import the JSON report.
-
-### ESLint
-
-ESLint Json report format (-f json)
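-
-For example (the output filename is a placeholder):
-
-{{< highlight bash >}}
-# lint the current directory and write the JSON-formatted results to a file
-eslint . -f json -o eslint_report.json
-{{< /highlight >}}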
-
-### Fortify
-
-Import Findings from XML file format.
-
-### Generic Findings Import
-
-Import Generic findings in CSV or JSON format.
-
-Attributes supported for CSV:
-- Date: Date of the finding in mm/dd/yyyy format.
-- Title: Title of the finding
-- CweId: Cwe identifier, must be an integer value.
-- Url: Url associated with the finding.
-- Severity: Severity of the finding. Must be one of Info, Low, Medium, High, or Critical.
-- Description: Description of the finding. Can be multiple lines if enclosed in double quotes.
-- Mitigation: Possible Mitigations for the finding. Can be multiple lines if enclosed in double quotes.
-- Impact: Detailed impact of the finding. Can be multiple lines if enclosed in double quotes.
-- References: References associated with the finding. Can be multiple lines if enclosed in double quotes.
-- Active: Indicator if the finding is active. Must be empty, TRUE, or FALSE.
-- Verified: Indicator if the finding has been verified. Must be empty, TRUE, or FALSE.
-- FalsePositive: Indicator if the finding is a false positive. Must be TRUE or FALSE.
-- Duplicate: Indicator if the finding is a duplicate. Must be TRUE or FALSE.
-
-The CSV expects a header row with the names of the attributes.
-
-Example of JSON format:
-
-```JSON
-{
- "findings": [
- {
- "title": "test title with endpoints as dict",
- "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau",
- "severity": "Medium",
- "mitigation": "Some mitigation",
- "date": "2021-01-06",
- "cve": "CVE-2020-36234",
- "cwe": 261,
- "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
- "file_path": "src/first.cpp",
- "line": 13,
- "endpoints": [
- {
- "host": "exemple.com"
- }
- ]
- },
- {
- "title": "test title with endpoints as strings",
- "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2",
- "severity": "Critical",
- "mitigation": "Some mitigation",
- "date": "2021-01-06",
- "cve": "CVE-2020-36235",
- "cwe": 287,
- "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
- "file_path": "src/two.cpp",
- "line": 135,
- "endpoints": [
- "http://urlfiltering.paloaltonetworks.com/test-command-and-control",
- "https://urlfiltering.paloaltonetworks.com:2345/test-pest"
- ]
- },
- {
- "title": "test title",
- "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2",
- "severity": "Critical",
- "mitigation": "Some mitigation",
- "date": "2021-01-06",
- "cve": "CVE-2020-36236",
- "cwe": 287,
- "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
- "file_path": "src/threeeeeeeeee.cpp",
- "line": 1353
- }
- ]
-}
-```
-
-This parser supports an attribute that accepts files as Base64-encoded strings. These files are attached to the respective findings.
-
-Example:
-
-```JSON
-{
- "title": "My wonderful report",
- "findings": [
- {
- "title": "Vuln with image",
- "description": "Some very long description",
- "severity": "Medium",
- "files": [
- {
- "title": "Screenshot from 2017-04-10 16-54-19.png",
- "data": "iVBORw0KGgoAAAANSUhEUgAABWgAAAK0CAIAAAARSkPJAAAAA3N<...>TkSuQmCC"
- }
- ]
- }
- ]
-}
-```
-
-### Gosec Scanner
-
-Import Gosec Scanner findings in JSON format.
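-
-For example (the output filename is a placeholder):
-
-{{< highlight bash >}}
-# recursively scan the current Go module and emit JSON
-gosec -fmt=json -out=gosec_report.json ./...
-{{< /highlight >}}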
-
-### Gitleaks
-
-Import Gitleaks findings in JSON format.
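-
-For example, with Gitleaks v8 (flag names differ in older releases):
-
-{{< highlight bash >}}
-# scan the current repository and write a JSON report
-gitleaks detect --source . --report-format json --report-path gitleaks_report.json
-{{< /highlight >}}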
-
-### GitLab SAST Report
-
-Import SAST Report vulnerabilities in JSON format: https://docs.gitlab.com/ee/user/application_security/sast/#reports-json-format
-
-### GitLab Dependency Scanning Report
-
-Import Dependency Scanning Report vulnerabilities in JSON format: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#reports-json-format
-
-### Github Vulnerability
-
-Import findings from Github vulnerability scan.
-
-
-Currently the parser can only handle the `RepositoryVulnerabilityAlert` object.
-The parser has a search feature that detects the relevant data in the report.
-
-Here are the mandatory objects and attributes:
-```
-vulnerabilityAlerts (RepositoryVulnerabilityAlert object)
- + id
- + createdAt (optional)
- + vulnerableManifestPath (optional)
- + securityVulnerability (SecurityVulnerability object)
- + severity (CRITICAL/HIGH/LOW/MODERATE)
- + package (optional)
- + name (optional)
- + advisory (SecurityAdvisory object)
- + description
- + summary
- + description
- + identifiers
- + value
- + references (optional)
- + url (optional)
- + cvss (optional)
- + vectorString (optional)
-```
-
-References:
- - https://docs.github.com/en/graphql/reference/objects#repositoryvulnerabilityalert
- - https://docs.github.com/en/graphql/reference/objects#securityvulnerability
-
-Github v4 graphql query to fetch data:
-
-{{< highlight graphql >}}
- query getVulnerabilitiesByOwner($owner: String!) {
- search(query: $owner, type: REPOSITORY, first: 100) {
- nodes {
- ... on Repository {
- name
- vulnerabilityAlerts(last: 100) {
- nodes {
- id
- securityVulnerability {
- severity
- package {
- name
- }
- advisory {
- description
- summary
- identifiers {
- type
- value
- }
- references {
- url
- }
- }
- }
- }
- }
- }
- }
- }
- }
-{{< /highlight >}}
-
-Another example is a Python script that queries a single repository:
-
-```python
-
-import json
-import requests
-
-
-query = """
-query getVulnerabilitiesByRepoAndOwner($name: String!, $owner: String!) {
- repository(name: $name, owner: $owner) {
- vulnerabilityAlerts(first: 100) {
- nodes {
- id
- createdAt
- securityVulnerability {
- severity
- package {
- name
- ecosystem
- }
- advisory {
- description
- summary
- identifiers {
- value
- type
- }
- references {
- url
- }
- cvss {
- vectorString
- }
- }
- }
- vulnerableManifestPath
- }
- }
- }
-}
-"""
-
-token = '...' # generated from GitHub settings
-headers = {"Authorization": "Bearer " + token}
-
-
-request = requests.post(url='https://api.github.com/graphql',
- json={
- "operationName": "getVulnerabilitiesByRepoAndOwner",
- 'query': query,
- 'variables': {
- 'name': 'gogoph',
- 'owner': 'damiencarol'
- }
- },
- headers=headers)
-
-result = request.json()
-print(json.dumps(result, indent=2))
-```
-
-### Hadolint
-
-Hadolint Dockerfile scan in json format.
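-
-For example (the output filename is a placeholder):
-
-{{< highlight bash >}}
-# lint the Dockerfile and capture the JSON output
-hadolint --format json Dockerfile > hadolint_report.json
-{{< /highlight >}}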
-
-### Harbor Vulnerability
-
-Import findings from Harbor registry container scan.
-
-
-### Horusec
-
-Import findings from Horusec scan.
-
-```shell
-./horusec_linux_x64 start -O=report.json -o json -i="tests/"
-```
-
-References:
- * [GitHub repository](https://github.com/ZupIT/horusec)
-
-### HuskyCI Report
-
-Import JSON reports from
-[HuskyCI]()
-
-### IBM AppScan DAST
-
-XML file from IBM App Scanner.
-
-### Immuniweb Scan
-
-XML Scan Result File from Immuniweb Scan.
-
-### IntSights Report
-
-IntSights Threat Command is a commercial Threat Intelligence platform that monitors both the open and dark web to identify threats for the Assets you care about (Domain Names, IP addresses, Brand Names, etc.).
-
-#### Manual Import
-Use the Export CSV feature in the IntSights Threat Command GUI to create an *IntSights Alerts.csv* file. This CSV
-file can then be imported into Defect Dojo.
-
-#### Automated Import
-
-The IntSights `get-complete-alert` API only returns details for a single alert. To automate the process,
-individually fetch details for each alert and append to a list. The list is then saved as the value for the key
-"Alerts". This JSON object can then be imported into Defect Dojo.
-
-Example:
-
- {
- "Alerts":[
- {
- "_id":"5c80egf83b4a3900078b6be6",
- "Details":{
- "Source":{
- "URL":"https://www.htbridge.com/websec/?id=ABCDEF",
- "Date":"2018-03-08T00:01:02.622Z",
- "Type":"Other",
- "NetworkType":"ClearWeb"
- },
- "Images":[
- "5c80egf833963a40007e01e8d",
- "5c80egf833b4a3900078b6bea",
- "5c80egf834626bd0007bd64db"
- ],
- "Title":"HTTP headers weakness in example.com web server",
- "Tags":[],
- "Type":"ExploitableData",
- "Severity":"Critical",
- "SubType":"VulnerabilityInTechnologyInUse",
- "Description":"X-XSS-PROTECTION and CONTENT-SECURITY-POLICY headers were not sent by the server, which makes it vulnerable for various attack vectors"
- },
- "Assignees":[
- "5c3c8f99903dfd0006ge5e61"
- ],
- "FoundDate":"2018-03-08T00:01:02.622Z",
- "Assets":[
- {
- "Type":"Domains",
- "Value":"example.com"
- }
- ],
- "TakedownStatus":"NotSent",
- "IsFlagged":false,
- "UpdateDate":"2018-03-08T00:01:02.622Z",
- "RelatedIocs":[],
- "RelatedThreatIDs":[],
- "Closed":{
- "IsClosed":false
- }
- }
- ]
- }
-
-### JFrogXRay
-
-Import the JSON format for the \"Security Export\" file. Use this importer for Xray version 2.X
-
-### JFrog XRay Unified
-
-Import the JSON format for the \"Security & Compliance | Reports\" export. Jfrog's Xray tool is an add-on to their Artifactory repository that does Software Composition Analysis, see https://www.jfrog.com/confluence/display/JFROG/JFrog+Xray for more information. \"Xray Unified\" refers to Xray Version 3.0 and later.
-
-### Kiuwan Scanner
-
-Import Kiuwan Scan in CSV format. Export as CSV Results on Kiuwan.
-
-### kube-bench Scanner
-
-Import JSON reports of Kubernetes CIS benchmark scans.
-
-### KICS Scanner
-
-Import the JSON report.
-
-### Meterian Scanner
-
-The Meterian JSON report output file can be imported.
-
-### Microfocus Webinspect Scanner
-
-Import XML report
-
-### MobSF Scanner
-
-Export a JSON file using the API endpoint `api/v1/report_json`.
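-
-A minimal sketch with curl, assuming a MobSF instance on localhost:8000; the scan hash and API key are placeholders:
-
-{{< highlight bash >}}
-# request the JSON report for a previously completed scan
-curl -X POST --url http://localhost:8000/api/v1/report_json \
-  --data "hash=<scan-hash>" \
-  -H "Authorization: <api-key>" > mobsf_report.json
-{{< /highlight >}}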
-
-### Mobsfscan
-
-Import the JSON report.
-
-### Mozilla Observatory Scanner
-
-Import JSON report.
-
-### Nessus (Tenable)
-
-Reports can be imported in CSV or .nessus (XML) format.
-
-### Nessus WAS (Tenable)
-
-Reports can be imported in CSV or .nessus (XML) format.
-
-### Netsparker
-
-Vulnerabilities List - JSON report
-
-### Nexpose XML 2.0 (Rapid7)
-
-Use the full XML export template from Nexpose.
-
-### Nikto
-
-Nikto web server scanner - https://cirt.net/Nikto2
-
-The current parser supports 3 sources:
- - XML output (old)
- - new XML output (with nxvmlversion="1.2" type)
- - JSON output
-
-See: https://github.com/sullo/nikto
-
-### Nmap
-
-XML output (use -oX)
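-
-For example (the target and output filename are placeholders):
-
-{{< highlight bash >}}
-# service/version detection scan with XML output
-nmap -sV -oX nmap_report.xml target.example.com
-{{< /highlight >}}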
-
-### Node Security Platform
-
-Node Security Platform (NSP) output file can be imported in JSON format.
-
-### NPM Audit
-
-Node Package Manager (NPM) Audit plugin output file can be imported in
-JSON format. Only imports the 'advisories' subtree.
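-
-For example (run in the directory containing package.json):
-
-{{< highlight bash >}}
-npm audit --json > npm_audit_report.json
-{{< /highlight >}}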
-
-### Nuclei
-
-Import JSON output of nuclei scan report
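-
-For example (the JSON flag is named `-json` in older nuclei releases; newer ones use `-jsonl`):
-
-{{< highlight bash >}}
-nuclei -u https://target.example.com -json -o nuclei_report.json
-{{< /highlight >}}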
-
-### Openscap Vulnerability Scan
-
-Import Openscap Vulnerability Scan in XML format.
-
-### OpenVAS CSV
-
-Import OpenVAS Scan in CSV format. Export as CSV Results on OpenVAS.
-
-### OssIndex Devaudit
-
-Import JSON formatted output from OSSIndex Devaudit.
-
-### Oss Review Toolkit
-
-Import the ORT evaluated model reporter output in JSON format.
-
-### PHP Security Audit v2
-
-Import PHP Security Audit v2 Scan in JSON format.
-
-### PHP Symfony Security Checker
-
-Import results from the PHP Symfony Security Checker.
-
-### Probely
-
-Synchronize Probely Plus findings with DefectDojo.
-
-To set up this integration, set the DefectDojo URL and API key on the
-Integrations page on Probely. Then, select which Product, Engagement,
-and, optionally, the Test you want to synchronize to. The API key needs
-to belong to a staff user.
-
-Works with DefectDojo 1.5.x and 1.6.x. Probely also supports non-public
-DefectDojo instances.
-
-For detailed instructions on how to configure Probely and DefectDojo,
-see Probely's documentation.
-
-
-### Qualys Scan
-
-Qualys output files can be imported in API XML format or WebGUI XML format.
-
-### Qualys Webapp Scan
-
-Qualys WebScan output files can be imported in XML format.
-
-### Retire.js
-
-Retire.js JavaScript scan (--js) output file can be imported in JSON
-format.
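-
-For example (the output filename is a placeholder):
-
-{{< highlight bash >}}
-retire --js --outputformat json --outputpath retire_report.json
-{{< /highlight >}}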
-
-### Risk Recon API Importer
-
-Import findings from Risk Recon via the API. Configure your own JSON
-report as follows
-
-{{< highlight json >}}
-{
- "url_endpoint": "https://api.riskrecon.com/v1",
- "api_key": "you-api-key",
- "companies": [
- {
- "name": "Company 1",
- "filters": {
- "domain_name": [],
- "ip_address": ["127.0.0.1"],
- "host_name": ["localhost"],
- "asset_value": [],
- "severity": ["critical", "high"],
- "priority": [],
- "hosting_provider": [],
- "country_name": []
- }
- },
- {
- "name": "Company 2",
- "filters": {
- "ip_address": ["0.0.0.0"]
- }
- }
-
- ],
- "filters": {
- "domain_name": [],
- "ip_address": [],
- "host_name": [],
- "asset_value": [],
- "severity": ["critical"],
- "priority": [],
- "hosting_provider": [],
- "country_name": []
- }
-}
-{{< /highlight >}}
-
-- More than one company finding list can be queried with its own set
- of filters. Company 1 shows all available filters, while Company 2
- shows that empty filters need not be present.
-- To query all companies in your Risk Recon instance, simply remove
- the "companies" field entirely.
-- If the "companies" field is not present, and filtering is still
- requested, the "filters" field can be used to filter all findings
- across all companies. It carries the same behavior as the company
- filters. The "filters" field is disregarded in the presence of
- the "companies" field.
-- Removing both fields will allow retrieval of all findings in the
- Risk Recon instance.
-
-### SARIF
-
-OASIS Static Analysis Results Interchange Format (SARIF). SARIF is
-supported by many tools.
-
-
-{{% alert title="Information" color="info" %}}
-SARIF parser customizes the Test_Type with data from the report.
-For example, a report with `Dockle` as a driver name will produce a Test with a Test_Type named `Dockle Scan (SARIF)`.
-{{% /alert %}}
-
-{{% alert title="Warning" color="warning" %}}
-The current implementation is limited and will aggregate all the findings in the SARIF file into one single report.
-{{% /alert %}}
-
-### ScoutSuite
-
-Multi-Cloud security auditing tool. It uses APIs exposed by cloud
-providers. Scan results are located in
-`scan-reports/scoutsuite-results/scoutsuite_*.json` files.
-Multiple scans will create multiple files if they are run against
-different cloud projects.
-
-### Semgrep JSON Report
-
-Import Semgrep output (--json)
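-
-For example (the ruleset and output filename are placeholders):
-
-{{< highlight bash >}}
-semgrep --config auto --json --output semgrep_report.json .
-{{< /highlight >}}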
-
-### SKF Scan
-
-Output of SKF Sprint summary export.
-
-### Snyk
-
-Snyk output file (`snyk test --json > snyk.json`) can be imported in
-JSON format.
-
-### SonarQube Scan (Aggregates findings per cwe, title, description, file_path.)
-
-SonarQube output file can be imported in HTML format.
-
-Version: >= 1.1.0
-
-### SonarQube Scan Detailed (Import all findings from SonarQube html report.)
-
-SonarQube output file can be imported in HTML format.
-
-Version: >= 1.1.0
-
-### SonarQube API Import
-
-SonarQube API will be accessed to gather the report. No report file is
-required.
-
-Follow these steps to setup the SonarQube API import:
-
-1. Configure the SonarQube authentication details by navigating to
- Configuration / Tool Configuration. Note the URL must be in the
- format of `https://<sonarqube-host>/api`. Select the tool
- type to be SonarQube. By default the tool will import vulnerabilities issues
- and security hotspots only,
- but additional filters can be setup using the Extras field separated by
- commas (e.g. BUG,VULNERABILITY,CODE_SMELL)
-2. In the Product settings add an API Scan Configuration. *Service key 1* must
- be the SonarQube project key, which can be found by navigating to a specific project and
- selecting the value from the url
- `https://<sonarqube-host>/dashboard?id=<project-key>`.
- When you do not provide a SonarQube project key, DefectDojo will
- use the name of the Product as the project key in SonarQube. If you would like to
- import findings from multiple projects, you can specify multiple keys as
- separated API Scan Configuration in the Product settings.
-3. Once all of the settings are made, the SonarQube API Import will be
- able to import all vulnerability information from the SonarQube
- instance. In the import or re-import dialog you can select which API Scan
- Configuration shall be used. If you do not choose
- any, DefectDojo will use the API Scan Configuration of the Product if there is
- only one defined or the SonarQube Tool Configuration if there is only one.
-
-**Note:** If `https` is used for the SonarQube instance, the certificate must be
-trusted by the DefectDojo instance.
-
-
-### SpotBugs
-
-XML report of the textui CLI.
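-
-A minimal sketch of generating the report with the textui CLI; the analysis target path is a placeholder:
-
-{{< highlight bash >}}
-spotbugs -textui -xml:withMessages -output spotbugs_report.xml ./build/classes
-{{< /highlight >}}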
-
-### Sonatype
-
-JSON output.
-
-### SSL Labs
-
-JSON output of the ssllabs-scan CLI.
-
-### Sslscan
-
-Import XML output of sslscan report.
-
-### Sslyze Scan
-
-XML report of SSLyze version 2 scan
-
-### SSLyze 3 Scan (JSON)
-
-JSON report of SSLyze version 3 scan
-
-### Testssl Scan
-
-Import CSV output of testssl scan report.
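-
-For example (the target and output filename are placeholders):
-
-{{< highlight bash >}}
-./testssl.sh --csvfile testssl_report.csv https://target.example.com
-{{< /highlight >}}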
-
-### Terrascan
-
-Import JSON output of terrascan scan report
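-
-For example (run from the IaC directory to be scanned):
-
-{{< highlight bash >}}
-terrascan scan -o json > terrascan_report.json
-{{< /highlight >}}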
-
-### Trivy
-
-JSON report of [trivy scanner](https://github.com/aquasecurity/trivy).
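-
-For example (the image name and output filename are placeholders):
-
-{{< highlight bash >}}
-trivy image --format json --output trivy_report.json alpine:3.18
-{{< /highlight >}}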
-
-### Trufflehog
-
-JSON Output of Trufflehog.
-
-### Trustwave
-
-CSV output of Trustwave vulnerability scan.
-
-### Twistlock
-
-JSON output of the `twistcli` tool. Example:
-
-{{< highlight bash >}}
-./twistcli images scan <image> --address https://<console-url> --user <console-user> --details --output-file=<report.json>
-{{< /highlight >}}
-
-The CSV output from the UI is now also accepted.
-
-### TFSec
-
-Import of JSON report from
-
-### Visual Code Grepper (VCG)
-VCG output can be imported in CSV or XML format.
-
-### Veracode
-
-Detailed XML Report
-
-### Wapiti Scan
-
-Import XML report.
-
-### Whitesource Scan
-
-Import JSON report
-
-### Wpscan Scanner
-
-Import JSON report.
-
-### Wfuzz JSON importer
-
-Import the result of Wfuzz (https://github.com/xmendez/wfuzz) if you export in JSON the result (`wfuzz -o json -f myJSONReport.json,json`).
-
-The HTTP return codes are mapped directly to severities as follows (this mapping is currently hardcoded in the parser).
-
-HTTP Return Code | Severity
------------------|---------
-200 | High
-401 | Medium
-403 | Medium
-407 | Medium
-500 | Low
-
-### Xanitizer
-
-Import XML findings list report, preferably with the parameter
-`generateDetailsInFindingsListReport=true`.
-
-### Yarn Audit
-
-Import Yarn Audit scan report in JSON format. Use something like `yarn audit --json > yarn_report.json`.
-
-### Zed Attack Proxy
-
-ZAP XML report format.
diff --git a/docs/content/en/integrations/parsers/_index.md b/docs/content/en/integrations/parsers/_index.md
new file mode 100644
index 00000000000..88f7d105bfa
--- /dev/null
+++ b/docs/content/en/integrations/parsers/_index.md
@@ -0,0 +1,6 @@
+---
+title: "Supported reports"
+description: "DefectDojo has the ability to import scan reports from a large number of security tools."
+draft: false
+weight: 1
+---
diff --git a/docs/content/en/integrations/parsers/api/_index.md b/docs/content/en/integrations/parsers/api/_index.md
new file mode 100644
index 00000000000..e25cd7f6f7c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/_index.md
@@ -0,0 +1,21 @@
+---
+title: "API Pull"
+description: "Report pulled to DefectDojo via API exposed by scanning service"
+weight: 2
+chapter: true
+---
+All parsers that use API pull have common basic configuration steps, but with different values.
+
+Follow these steps to set up API importing:
+
+1. Configure the API authentication details by navigating to
+ `Configuration -> Tool Configuration -> Add Tool Configuration`. Enter a `Name`,
+ selecting the related `Tool Type` and `Authentication Type` "API Key". Paste your credentials
+ to the proper fields based on definitions below.
+
+2. In the `Product` settings select `Add API Scan Configuration` and select the
+ previously added `Tool Configuration`. Provide values based on definitions below.
+
+3. After this is done, you can import the findings on the `Product` page through
+ `Findings -> Import Scan Results`. As the `Scan type`, select the related type and
+ the API scan configuration from the previous step, then click `Import`.
diff --git a/docs/content/en/integrations/parsers/api/blackduck.md b/docs/content/en/integrations/parsers/api/blackduck.md
new file mode 100644
index 00000000000..36107b6b7c5
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/blackduck.md
@@ -0,0 +1,12 @@
+---
+title: "Blackduck API"
+toc_hide: true
+---
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+In `Tool Configuration`, set the `Tool Type` to "BlackDuck API" and the `Authentication Type` to "API Key".
+Paste your BlackDuck API token in the `API Key` field.
+
+In `Add API Scan Configuration` provide the ID
+of the project from which to import findings in the field `Service key 1`.
+Provide the version of the project from which to import findings in the field `Service key 2`.
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/api/bugcrowd.md b/docs/content/en/integrations/parsers/api/bugcrowd.md
new file mode 100644
index 00000000000..1ebaebb3e4a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/bugcrowd.md
@@ -0,0 +1,17 @@
+---
+title: "Bugcrowd API"
+toc_hide: true
+---
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+In `Tool Configuration`, set the `Tool Type` to "Bugcrowd API" and the `Authentication Type` to "API Key".
+Paste your Bugcrowd API key directly in the format `username:password` in the `API Key` field; it will be sent in the header `'Authorization': 'Token {}'.format(self.api_token)`.
+
+For each product, you can configure 2 things:
+- `Service key 1`: the bugcrowd program code (it's the slug name in the url for the program, url safe)
+- `Service key 2`: the Bugcrowd target name (the full name; it will be URL-encoded; you can find it at https://tracker.bugcrowd.com/<program>/settings/scope/target_groups)
+ - It can be left empty so that all program submissions are imported
+
+That way, per product, you can use the same program but separate findings by target, which is a fairly common way of filtering/grouping in Bugcrowd.
+Adding support for a third filter would be possible with `Service key 3`; feel free to make a PR.
diff --git a/docs/content/en/integrations/parsers/api/cobalt.md b/docs/content/en/integrations/parsers/api/cobalt.md
new file mode 100644
index 00000000000..59615c5d1ea
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/cobalt.md
@@ -0,0 +1,16 @@
+---
+title: "Cobalt.io API Import
+"
+toc_hide: true
+---
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+In `Tool Configuration`, set the `Tool Type` to "Cobalt.io" and the `Authentication Type` to "API Key".
+Paste your Cobalt.io API token in the `API Key` field and the desired org token in the `Extras` field.
+
+In `Add API Scan Configuration` provide the ID
+of the asset from which to import findings in the field `Service key 1`.
+The ID can be found at the end of the URL when viewing the asset in your browser.
+
+If you have more than one asset configured, you
+must also select which Cobalt.io API Scan Configuration to use.
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/api/edgescan.md b/docs/content/en/integrations/parsers/api/edgescan.md
new file mode 100644
index 00000000000..e5f122b6649
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/edgescan.md
@@ -0,0 +1,38 @@
+---
+title: "Edgescan"
+toc_hide: true
+---
+Import Edgescan vulnerabilities by API or [JSON file](../../file/edgescan.md)
+
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+**Step 1: Add tool configuration**
+
+- Select the gear icon from the left hand side of the page.
+- Click on the `Tool Configuration` option and then `+ Add Tool Configuration` from the dropdown menu.
+- Once presented with a series of fields, set `Tool Type` to "Edgescan" and `Authentication Type` to "API Key".
+- Paste your Edgescan API key in the `API Key` field.
+- Click on the `Submit` button.
+
+**Step 2: Add and configure a product**
+
+- Select the hamburger menu icon from the left hand side of the page.
+- Click on the `All Products` option and then `+ Add Product`.
+- Fill in the fields presented.
+- Once the product is added, click on the `Settings` option then `Add API Scan Configuration`.
+- Select the previously added Edgescan `Tool Configuration`.
+- Provide the Edgescan asset ID(s) whose findings you wish to import in the field `Service key 1`.
+ - Note that multiple asset IDs should be comma separated with no spacing.
+ - If you want to import vulnerabilities for all assets, simply leave the Service key 1 field empty.
+
+**Step 3: Importing scan results**
+
+- After the previous steps are complete, you can import the findings by selecting the `Findings` option
+on the product's page and then `Import Scan Results`.
+- Once you are presented with a series of fields, select `Edgescan Scan` as the scan type.
+ - If you have more than one asset configured, you must also select which Edgescan `API Scan Configuration` to use.
+- Click on the `Import` button.
+
+**Important Reminder:**
+
+- To ensure you're not introducing duplicate vulnerabilities, always use the "Re-Upload Scan" option when re-importing findings from Edgescan. This can be found within the engagement's options by clicking on `Engagements`, then the active engagement in question, then `Edgescan Scan`, and selecting "Re-Upload Scan" from the dropdown menu located on the right.
diff --git a/docs/content/en/integrations/parsers/api/sonarqube.md b/docs/content/en/integrations/parsers/api/sonarqube.md
new file mode 100644
index 00000000000..bd976a43fa8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/sonarqube.md
@@ -0,0 +1,46 @@
+---
+title: "SonarQube API Import"
+toc_hide: true
+---
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+In `Tool Configuration`, set the `Tool Type` to "SonarQube" and the `Authentication Type` to "API Key".
+Note the URL must be in the format `https://<sonarqube-host>/api`.
+Paste your SonarQube API token in the `API Key` field.
+By default the tool will import vulnerability issues
+and security hotspots only, but additional filters can be set up using the
+Extras field, separated by commas (e.g. `BUG,VULNERABILITY,CODE_SMELL`). When using
+SonarCloud, you must also specify the Organization ID in the Extras field as follows:
+`OrgID=sonarcloud-organization-ID`. If also specifying issue type filters, please
+separate the items in the Extras field by a vertical bar as follows:
+`BUG,VULNERABILITY,CODE_SMELL|OrgID=sonarcloud-organization-ID`
+
+In "Add API Scan Configuration"
+- `Service key 1` must
+ be the SonarQube project key, which can be found by navigating to a specific project and
+ selecting the value from the url
+ `https://<sonarqube-host>/dashboard?id=<project-key>`.
+ When you do not provide a SonarQube project key, DefectDojo will
+ use the name of the Product as the project key in SonarQube. If you would like to
+ import findings from multiple projects, you can specify multiple keys as
+ separated `API Scan Configuration` in the `Product` settings.
+- If using SonarCloud, the organization ID from step 1 can be used, but it
+ can be overridden by supplying a different organization ID in the `Service key 2` input field.
+
+## Multiple SonarQube API Configurations
+
+In the import or re-import dialog you can select which `API Scan
+Configuration` shall be used. If you do not choose
+any, DefectDojo will use the `API Scan Configuration` of the Product if there is
+only one defined or the SonarQube `Tool Configuration` if there is only one.
+
+## Multi Branch Scanning
+
+If using a version of SonarQube with multi-branch scanning, the branch to be scanned can
+be supplied in the `branch tag` field at import/re-import time. If the branch does not exist,
+a notification will be generated in the alerts table indicating that the branch to be imported
+does not exist. If a branch name is not supplied during import/re-import, the default branch
+of the SonarQube project will be used.
+
+**Note:** If `https` is used for the SonarQube instance, the certificate must be
+trusted by the DefectDojo instance.
diff --git a/docs/content/en/integrations/parsers/api/vulners.md b/docs/content/en/integrations/parsers/api/vulners.md
new file mode 100644
index 00000000000..127919690c2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/api/vulners.md
@@ -0,0 +1,19 @@
+---
+title: "Vulners"
+toc_hide: true
+---
+All parsers that pull findings via an API share the same basic configuration steps, but with different values. Please [read these steps](../) first.
+
+Import Vulners [Audit](https://vulners.com/docs/API_wrapper/linux_audit/#linux-audit) results, no file required.
+
+In `Tool Configuration`, select `Tool Type` to "Vulners" and add the API Key
+
+In the `Product` settings select `Add API Scan Configuration` and select the previously added Vulners `API Tool Configuration`.
+
+After this is done, you can import the findings by selecting "Vulners" as the scan type.
+
+Detailed installation steps can be found in [vulners documentation](https://vulners.com/docs/plugins/defectdojo/).
+
+Use following [instructions](https://vulners.com/docs/apikey/) to generate Vulners API Key.
+
+More details about DefectDojo-plugin integration can be found at [vulners integrations page](https://vulners.com/plugins).
diff --git a/docs/content/en/integrations/parsers/file/_index.md b/docs/content/en/integrations/parsers/file/_index.md
new file mode 100644
index 00000000000..f567eacb2df
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/_index.md
@@ -0,0 +1,6 @@
+---
+title: "Files"
+description: "Report uploaded to DefectDojo as files"
+weight: 1
+chapter: true
+---
diff --git a/docs/content/en/integrations/parsers/file/acunetix.md b/docs/content/en/integrations/parsers/file/acunetix.md
new file mode 100644
index 00000000000..96a2c2005cc
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/acunetix.md
@@ -0,0 +1,8 @@
+---
+title: "Acunetix Scanner"
+toc_hide: true
+---
+XML format
+
+### Sample Scan Data
+Sample Acunetix Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/acunetix).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/acunetix360.md b/docs/content/en/integrations/parsers/file/acunetix360.md
new file mode 100644
index 00000000000..01b208bbeaa
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/acunetix360.md
@@ -0,0 +1,8 @@
+---
+title: "Acunetix 360 Scanner"
+toc_hide: true
+---
+Vulnerabilities List - JSON report
+
+### Sample Scan Data
+Sample Acunetix 360 Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/acunetix360).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/anchore_engine.md b/docs/content/en/integrations/parsers/file/anchore_engine.md
new file mode 100644
index 00000000000..9e6f8982785
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/anchore_engine.md
@@ -0,0 +1,40 @@
+---
+title: "Anchore-Engine"
+toc_hide: true
+---
+
+### File Types
+DefectDojo parser accepts a .json file.
+
+Using the [Anchore CLI](https://docs.anchore.com/current/docs/using/cli_usage/images/inspecting_image_content/) is the most reliable way to generate an Anchore report which DefectDojo can parse. When generating a report with the Anchore CLI, please use the following command to ensure complete data: `anchore-cli --json image vuln <image> all`
+
+### Acceptable JSON Format
+All properties are strings and are required by the parser.
+
+~~~
+
+{
+ "imageDigest": "sha256:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
+ "vulnerabilities": [
+ {
+ "feed": "example-feed",
+ "feed_group": "example-feed-group",
+ "fix": "1.2.4",
+ "package": "example-package",
+ "package_cpe": "cpe:2.3:a:*:example:1.2.3:*:*:*:*:*:*:*",
+ "package_name": "example-package-name",
+ "package_path": "path/to/package",
+ "package_type": "dpkg",
+ "package_version": "1.2.3",
+ "severity": "Medium",
+ "url": "https://example.com/cve/CVE-2011-3389",
+ "vuln": "CVE-2011-3389"
+ },
+ ...
+ ],
+ "vulnerability_type": "os"
+}
+~~~
+
+### Sample Scan Data
+Sample Anchore-Engine scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/anchore_engine).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/anchore_enterprise.md b/docs/content/en/integrations/parsers/file/anchore_enterprise.md
new file mode 100644
index 00000000000..78d3441eb5c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/anchore_enterprise.md
@@ -0,0 +1,8 @@
+---
+title: "Anchore Enterprise Policy Check"
+toc_hide: true
+---
+Anchore-CLI JSON policy check report format.
+
+### Sample Scan Data
+Sample Anchore Enterprise Policy Check scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/anchore_enterprise).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/anchore_grype.md b/docs/content/en/integrations/parsers/file/anchore_grype.md
new file mode 100644
index 00000000000..f2adec0d073
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/anchore_grype.md
@@ -0,0 +1,192 @@
+---
+title: "Anchore Grype"
+toc_hide: true
+---
+### File Types
+DefectDojo parser accepts a .json file.
+
+Anchore Grype JSON files are created using the Grype CLI, using the '-o json' option. See: https://github.com/anchore/grype
+
+**Example:**
+{{< highlight bash >}}
+grype yourApp/example-page -o json > example_vulns.json
+{{< /highlight >}}
+
+### Acceptable JSON Format
+All properties are expected as strings and are required by the parser.
+
+~~~
+{
+ "matches": [
+ {
+ "vulnerability": {
+ "id": "example-id",
+ "dataSource": "https://example.org/.../example-id",
+ "namespace": "exampleName",
+ "severity": "exampleSeverity",
+ "urls": [
+ "https://example.org/.../example-id",
+ ...
+ ],
+ "cvss": [],
+ "fix": {
+ "versions": [],
+ "state": "not-fixed"
+ },
+ "advisories": []
+ },
+ "relatedVulnerabilities": [
+ {
+ "id": "first-related-example-id",
+ "dataSource": "https://example.org/.../related-example-id",
+ "namespace": "first-related-exampleName",
+ "severity": "first-related-exampleSeverity",
+ "urls": [
+ "https://example.org/.../related-example-id",
+ ...
+ ],
+ "description": "first-example-description",
+ "cvss": [
+ {
+ "version": "2.0",
+ "vector": "AV:L/AC:L/Au:N/C:N/I:P/A:N",
+ "metrics": {
+ "baseScore": 2.1,
+ "exploitabilityScore": 3.9,
+ "impactScore": 2.9
+ },
+ "vendorMetadata": {}
+ }
+ ]
+ },
+ ...
+ ],
+ "matchDetails": [
+ {
+ "matcher": "example-matcher",
+ "searchedBy": {
+ "distro": {
+ "type": "example-distrotype",
+ "version": "10"
+ },
+ "namespace": "exampleName",
+ "package": {
+ "name": "example-package",
+ "version": "1.17-3+deb10u3"
+ }
+ },
+ "found": {
+ "versionConstraint": "none (deb)"
+ }
+ }
+ ],
+ "artifact": {
+ "name": "example-artifact",
+ "version": "example-artifact-version",
+ "type": "example-type",
+ "locations": [
+ {
+ "path": ".../examplePath/",
+ "layerID": "exampleLayerID"
+ },
+ {
+ "path": ".../examplePath-2/",
+ "layerID": "exampleLayerID"
+ },
+ ...
+ ],
+ "language": "",
+ "licenses": [
+ "GPL-2"
+ ],
+ "cpes": [
+ "example-cpe",
+ ...
+ ],
+ "purl": "pkg:deb/debian/libgssapi-krb5-2@1.17-3+deb10u3?arch=amd64",
+ "metadata": {
+ "Source": "krb5"
+ }
+ }
+ },
+ ...
+ ],
+ "source": {
+ "type": "image",
+ "target": {
+ "userInput": "vulnerable-image:latest",
+ "imageID": "sha256:ce9898fd214aef9c994a42624b09056bdce3ff4a8e3f68dc242d967b80fcbeee",
+ "manifestDigest": "sha256:9d8825ab20ac86b40eb71495bece1608a302fb180384740697a28c2b0a5a0fc6",
+ "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
+ "tags": [
+ "vulnerable-image:latest"
+ ],
+ "imageSize": 707381791,
+ "layers": [
+ {
+ "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
+ "digest": "sha256:d000633a56813933cb0ac5ee3246cf7a4c0205db6290018a169d7cb096581046",
+ "size": 69238554
+ },
+ ...
+ ],
+ "manifest": "exampleManifestString==",
+ "config": "exampleConfigString",
+ "repoDigests": []
+ }
+ },
+ "distro": {
+ "name": "debian",
+ "version": "10",
+ "idLike": ""
+ },
+ "descriptor": {
+ "name": "grype",
+ "version": "0.28.0",
+ "configuration": {
+ "configPath": "",
+ "output": "json",
+ "file": "",
+ "output-template-file": "",
+ "quiet": false,
+ "check-for-app-update": true,
+ "only-fixed": false,
+ "scope": "Squashed",
+ "log": {
+ "structured": false,
+ "level": "",
+ "file": ""
+ },
+ "db": {
+ "cache-dir": "/home/user/.cache/grype/db",
+ "update-url": "https://toolbox-data.anchore.io/grype/databases/listing.json",
+ "ca-cert": "",
+ "auto-update": true,
+ "validate-by-hash-on-start": false
+ },
+ "dev": {
+ "profile-cpu": false,
+ "profile-mem": false
+ },
+ "fail-on-severity": "",
+ "registry": {
+ "insecure-skip-tls-verify": false,
+ "insecure-use-http": false,
+ "auth": []
+ },
+ "ignore": null,
+ "exclude": []
+ },
+ "db": {
+ "built": "2021-12-24T08:14:02Z",
+ "schemaVersion": 3,
+ "location": "/home/user/.cache/grype/db/3",
+ "checksum": "sha256:6c4777e1acea787e5335ccee6b5e4562cd1767b9cca138c07e0802efb2a74162",
+ "error": null
+ }
+ }
+}
+~~~
+
+### Sample Scan Data
+Sample Grype scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/anchore_grype).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/anchorectl_policies.md b/docs/content/en/integrations/parsers/file/anchorectl_policies.md
new file mode 100644
index 00000000000..8ff36f72396
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/anchorectl_policies.md
@@ -0,0 +1,8 @@
+---
+title: "AnchoreCTL Policies Report"
+toc_hide: true
+---
+AnchoreCTL's JSON policies report format.
+
+### Sample Scan Data
+Sample AnchoreCTL Policies Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/anchorectl_policies).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/anchorectl_vulns.md b/docs/content/en/integrations/parsers/file/anchorectl_vulns.md
new file mode 100644
index 00000000000..7f41b0e0a47
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/anchorectl_vulns.md
@@ -0,0 +1,8 @@
+---
+title: "AnchoreCTL Vuln Report"
+toc_hide: true
+---
+AnchoreCTL's JSON vulnerability report format.
+
+### Sample Scan Data
+Sample AnchoreCTL Vuln Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/anchorectl_vulns).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/appspider.md b/docs/content/en/integrations/parsers/file/appspider.md
new file mode 100644
index 00000000000..0fd952c0f0d
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/appspider.md
@@ -0,0 +1,9 @@
+---
+title: "AppSpider (Rapid7)"
+toc_hide: true
+---
+Use the VulnerabilitiesSummary.xml file found in the zipped report
+download.
+
+### Sample Scan Data
+Sample AppSpider (Rapid7) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/appspider).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/aqua.md b/docs/content/en/integrations/parsers/file/aqua.md
new file mode 100644
index 00000000000..78b5f0cb384
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/aqua.md
@@ -0,0 +1,8 @@
+---
+title: "Aqua"
+toc_hide: true
+---
+JSON report format.
+
+### Sample Scan Data
+Sample Aqua scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/aqua).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/arachni.md b/docs/content/en/integrations/parsers/file/arachni.md
new file mode 100644
index 00000000000..0c48e534d13
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/arachni.md
@@ -0,0 +1,14 @@
+---
+title: "Arachni Scanner"
+toc_hide: true
+---
+Arachni Web Scanner (https://www.arachni-scanner.com)
+
+Reports are generated with the `arachni_reporter` tool this way:
+
+{{< highlight bash >}}
+arachni_reporter --reporter 'json' js.com.afr
+{{< /highlight >}}
+
+### Sample Scan Data
+Sample Arachni Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/arachni).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/asff.md b/docs/content/en/integrations/parsers/file/asff.md
new file mode 100644
index 00000000000..de830908aa2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/asff.md
@@ -0,0 +1,13 @@
+---
+title: "AWS Security Finding Format (ASFF)"
+toc_hide: true
+---
+
+AWS Security Hub consumes, aggregates, organizes, and prioritizes findings from AWS security services and from the third-party product integrations. Security Hub processes these findings using a standard findings format called the AWS Security Finding Format (ASFF), which eliminates the need for time-consuming data conversion efforts. Then it correlates ingested findings across products to prioritize the most important ones.
+
+Reference: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html
+
+Prowler tool can generate this format with option `-M json-asff`.
+
+### Sample Scan Data
+Sample AWS Security Finding Format (ASFF) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/asff).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/auditjs.md b/docs/content/en/integrations/parsers/file/auditjs.md
new file mode 100644
index 00000000000..03ed4e4bf8e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/auditjs.md
@@ -0,0 +1,12 @@
+---
+title: "AuditJS (OSSIndex)"
+toc_hide: true
+---
+Reports from the AuditJS scanning tool (which uses the OSSIndex database) are generated with the `--json` or `-j` option.
+
+{{< highlight bash >}}
+auditjs ossi --json > auditjs_report.json
+{{< /highlight >}}
+
+### Sample Scan Data
+Sample AuditJS (OSSIndex) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/auditjs).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/aws_prowler.md b/docs/content/en/integrations/parsers/file/aws_prowler.md
new file mode 100644
index 00000000000..628b657ef07
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/aws_prowler.md
@@ -0,0 +1,8 @@
+---
+title: "AWS Prowler Scanner"
+toc_hide: true
+---
+Prowler file can be imported as a CSV (`-M csv`) or JSON (`-M json`) file.
+
+### Sample Scan Data
+Sample AWS Prowler Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/aws_prowler).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/aws_prowler_v3.md b/docs/content/en/integrations/parsers/file/aws_prowler_v3.md
new file mode 100644
index 00000000000..17dcf9698ae
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/aws_prowler_v3.md
@@ -0,0 +1,72 @@
+---
+title: "AWS Prowler V3"
+toc_hide: true
+---
+
+### File Types
+DefectDojo parser accepts a .json file. Please note: earlier versions of AWS Prowler create output data in a different format. See our other documentation if you are using an earlier version of AWS Prowler: https://documentation.defectdojo.com/integrations/parsers/file/aws_prowler/
+
+JSON reports can be created from the [AWS Prowler V3 CLI](https://docs.prowler.cloud/en/latest/tutorials/reporting/#json) using the following command: `prowler -M json`
+
+### Acceptable JSON Format
+Parser expects an array of assessments. All properties are strings and are required by the parser.
+
+~~~
+
+[
+ {
+ "AssessmentStartTime": "example_timestamp",
+ "FindingUniqueId": "example_uniqueIdFromTool",
+ "Provider": "example_provider",
+ "CheckID": "acm_certificates_expiration_check",
+ "CheckTitle": "Check if ACM Certificates are about to expire in specific days or less",
+ "CheckType": [
+ "Example ASFF-Compliant Finding Type"
+ ],
+ "ServiceName": "example_awsServiceName",
+ "SubServiceName": "",
+ "Status": "FAIL",
+ "StatusExtended": "Example status description",
+ "Severity": "example_severity",
+ "ResourceType": "AwsCertificateManagerCertificate",
+ "ResourceDetails": "",
+ "Description": "Example general test description.",
+ "Risk": "Example test impact description.",
+ "RelatedUrl": "https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html",
+ "Remediation": {
+ "Code": {
+ "NativeIaC": "",
+ "Terraform": "",
+ "CLI": "",
+ "Other": ""
+ },
+ "Recommendation": {
+ "Text": "Example recommendation.",
+ "Url": "https://docs.aws.amazon.com/config/latest/developerguide/example_related_documentation.html"
+ }
+ },
+ "Compliance": {
+ "GDPR": [
+ "article_32"
+ ],
+ ...
+ },
+ "Categories": [],
+ "DependsOn": [],
+ "RelatedTo": [],
+ "Notes": "",
+ "Profile": null,
+ "AccountId": "example_accountId",
+ "OrganizationsInfo": null,
+ "Region": "example_region",
+ "ResourceId": "example.resource.id.com",
+ "ResourceArn": "arn:aws:acm:us-east-1:999999999999:certificate/ffffffff-0000-0000-0000-000000000000",
+ "ResourceTags": {}
+ }
+ ...
+]
+
+~~~
+
+### Sample Scan Data
+Unit tests of AWS Prowler V3 JSON can be found at https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/aws_prowler_v3.
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/aws_scout2.md b/docs/content/en/integrations/parsers/file/aws_scout2.md
new file mode 100644
index 00000000000..2a5cbbf7157
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/aws_scout2.md
@@ -0,0 +1,17 @@
+---
+title: "AWS Scout2 Scanner (deprecated)"
+toc_hide: true
+---
+JS file in scout2-report/inc-awsconfig/aws_config.js.
+
+{{% alert title="Warning" color="warning" %}}
+AWS Scout2 Scanner is deprecated and has been replaced with ScoutSuite (https://github.com/nccgroup/ScoutSuite) upstream.
+Please switch to the new parser for ScoutSuite.
+{{% /alert %}}
+
+{{% alert title="Warning" color="warning" %}}
+This parser is deactivated by default in releases >= 2.3.1 and will be removed in release >= 3.x.x.
+{{% /alert %}}
+
+### Sample Scan Data
+Sample AWS Scout2 Scanner (deprecated) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/aws_scout2).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/awssecurityhub.md b/docs/content/en/integrations/parsers/file/awssecurityhub.md
new file mode 100644
index 00000000000..3e101cdd22b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/awssecurityhub.md
@@ -0,0 +1,88 @@
+---
+title: "AWS Security Hub"
+toc_hide: true
+---
+### File Types
+DefectDojo parser accepts a .json file.
+
+JSON reports can be created from the [AWS Security Hub CLI](https://docs.aws.amazon.com/cli/latest/reference/securityhub/get-findings.html) using the following command: `aws securityhub get-findings`.
+
+### Acceptable JSON Format
+Parser expects a .json file, with an array of Findings contained within a single JSON object. All properties are strings and are required by the parser.
+
+~~~
+{
+ "findings": [
+ {
+ "SchemaVersion": "2018-10-08",
+ "Id": "arn:aws:securityhub:us-east-1:012345678912:subscription/aws-foundational-security-best-practices/v/1.0.0/IAM.5/finding/de861909-2d26-4e45-bd86-19d2ab6ceef1",
+ "ProductArn": "arn:aws:securityhub:us-east-1::product/aws/securityhub",
+ "GeneratorId": "aws-foundational-security-best-practices/v/1.0.0/IAM.5",
+ "AwsAccountId": "012345678912",
+ "Types": [
+ "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices"
+ ],
+ "FirstObservedAt": "2020-06-08T14:33:07.560Z",
+ "LastObservedAt": "2020-06-14T21:02:53.940Z",
+ "CreatedAt": "2020-06-08T14:33:07.560Z",
+ "UpdatedAt": "2020-06-14T21:02:53.454Z",
+ "Severity": {
+ "Product": 0,
+ "Label": "INFORMATIONAL",
+ "Normalized": 0,
+ "Original": "INFORMATIONAL"
+ },
+ "Title": "IAM.5 MFA should be enabled for all IAM users that have console password",
+ "Description": "This AWS control checks whether AWS Multi-Factor Authentication (MFA) is enabled for all AWS Identity and Access Management (IAM) users that use a console password.",
+ "Remediation": {
+ "Recommendation": {
+ "Text": "For directions on how to fix this issue, please consult the AWS Security Hub Foundational Security Best Practices documentation.",
+ "Url": "https://docs.aws.amazon.com/console/securityhub/IAM.5/remediation"
+ }
+ },
+ "ProductFields": {
+ "StandardsArn": "arn:aws:securityhub:::standards/aws-foundational-security-best-practices/v/1.0.0",
+ "StandardsSubscriptionArn": "arn:aws:securityhub:us-east-1:012345678912:subscription/aws-foundational-security-best-practices/v/1.0.0",
+ "ControlId": "IAM.5",
+ "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/IAM.5/remediation",
+ "RelatedAWSResources:0/name": "securityhub-mfa-enabled-for-iam-console-access-9ae73a2f",
+ "RelatedAWSResources:0/type": "AWS::Config::ConfigRule",
+ "StandardsControlArn": "arn:aws:securityhub:us-east-1:012345678912:control/aws-foundational-security-best-practices/v/1.0.0/IAM.5",
+ "aws/securityhub/SeverityLabel": "INFORMATIONAL",
+ "aws/securityhub/ProductName": "Security Hub",
+ "aws/securityhub/CompanyName": "AWS",
+ "aws/securityhub/annotation": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted.",
+ "aws/securityhub/FindingId": "arn:aws:securityhub:us-east-1::product/aws/securityhub/arn:aws:securityhub:us-east-1:012345678912:subscription/aws-foundational-security-best-practices/v/1.0.0/IAM.5/finding/de861909-2d26-4e45-bd86-19d2ab6ceef1"
+ },
+ "Resources": [
+ {
+ "Type": "AwsAccount",
+ "Id": "AWS::::Account:012345678912",
+ "Partition": "aws",
+ "Region": "us-east-1"
+ }
+ ],
+ "Compliance": {
+ "Status": "PASSED",
+ "StatusReasons": [
+ {
+ "ReasonCode": "CONFIG_EVALUATIONS_EMPTY",
+ "Description": "AWS Config evaluated your resources against the rule. The rule did not apply to the AWS resources in its scope, the specified resources were deleted, or the evaluation results were deleted."
+ }
+ ]
+ },
+ "WorkflowState": "NEW",
+ "Workflow": {
+ "Status": "NEW"
+ },
+ "RecordState": "ACTIVE"
+ },
+ ...
+ ]
+}
+
+
+~~~
+
+### Sample Scan Data
+Sample scan data for testing purposes can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/awssecurityhub).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/azure_security_center_recommendations.md b/docs/content/en/integrations/parsers/file/azure_security_center_recommendations.md
new file mode 100644
index 00000000000..c4bffbd7a3a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/azure_security_center_recommendations.md
@@ -0,0 +1,8 @@
+---
+title: "Azure Security Center Recommendations Scan"
+toc_hide: true
+---
+Azure Security Center recommendations can be exported from the user interface in CSV format.
+
+### Sample Scan Data
+Sample Azure Security Center Recommendations Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/azure_security_center_recommendations).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/bandit.md b/docs/content/en/integrations/parsers/file/bandit.md
new file mode 100644
index 00000000000..70fde4b58b0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/bandit.md
@@ -0,0 +1,67 @@
+---
+title: "Bandit"
+toc_hide: true
+---
+
+### File Types
+DefectDojo parser accepts a .json file.
+
+To export a .json file from Bandit, you will need to install and run the .json report formatter from your Bandit instance.
+See Bandit documentation: https://bandit.readthedocs.io/en/latest/formatters/index.html
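+
+For example (the source directory and output filename are placeholders):
+
+{{< highlight bash >}}
+bandit -r ./src -f json -o bandit_report.json
+{{< /highlight >}}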
+
+### Acceptable JSON Format
+All properties are expected as strings, except "metrics" properties, which are expected as numbers. All properties are required by the parser.
+
+~~~
+{
+ "errors": [],
+ "generated_at": "example-timestamp",
+ "metrics": {
+ "_totals": {
+ "CONFIDENCE.HIGH": 1.0,
+ "CONFIDENCE.LOW": 0.0,
+ "CONFIDENCE.MEDIUM": 0.0,
+ "CONFIDENCE.UNDEFINED": 0.0,
+ "SEVERITY.HIGH": 0.0,
+ "SEVERITY.LOW": 1.0,
+ "SEVERITY.MEDIUM": 0.0,
+ "SEVERITY.UNDEFINED": 0.0,
+ "loc": 2,
+ "nosec": 0
+ },
+ "one/one.py": {
+ "CONFIDENCE.HIGH": 1.0,
+ "CONFIDENCE.LOW": 0.0,
+ "CONFIDENCE.MEDIUM": 0.0,
+ "CONFIDENCE.UNDEFINED": 0.0,
+ "SEVERITY.HIGH": 0.0,
+ "SEVERITY.LOW": 1.0,
+ "SEVERITY.MEDIUM": 0.0,
+ "SEVERITY.UNDEFINED": 0.0,
+ "loc": 2,
+ "nosec": 0
+ }
+ ...
+ },
+ "results": [
+ {
+ "code": "1 import os\n2 assert False\n",
+ "filename": "example.filename",
+ "issue_confidence": "example_confidence",
+ "issue_severity": "example_severity",
+ "issue_text": "Example issue description.",
+ "line_number": 2,
+ "line_range": [
+ 2
+ ],
+ "more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
+ "test_id": "B101",
+ "test_name": "assert_used"
+ }
+ ...
+ ]
+}
+~~~
+
+### Sample Scan Data
+Sample Bandit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/bandit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/blackduck.md b/docs/content/en/integrations/parsers/file/blackduck.md
new file mode 100644
index 00000000000..7f8226fd1e6
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/blackduck.md
@@ -0,0 +1,14 @@
+---
+title: "Blackduck Hub"
+toc_hide: true
+---
+Two options:
+
+* Import the zip file as created by the Blackduck export.
+The zip file must contain security.csv and files.csv in order to
+produce findings that include file location information.
+* Import a single security.csv file. Findings will not have any file location
+information.
+
+### Sample Scan Data
+Sample Blackduck Hub scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/blackduck).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md b/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md
new file mode 100644
index 00000000000..a51cea701a3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/blackduck_binary_analysis.md
@@ -0,0 +1,23 @@
+---
+title: "Blackduck Binary Analysis"
+toc_hide: true
+---
+
+#### **What** ####
+Black Duck Binary Analysis gives you visibility into open source and third-party dependencies that have been compiled into executables, libraries, containers, and firmware. You can analyze individual files using an intuitive user interface or Black Duck multifactor open source detection, which automates the scanning of binary artifacts.
+
+Using a combination of static and string analysis techniques coupled with fuzzy matching against the Black Duck KnowledgeBase, Black Duck Binary Analysis quickly and reliably identifies components, even if they’ve been modified.
+
+For more info, check out Black Duck Binary Analysis [here](https://www.synopsys.com/software-integrity/software-composition-analysis-tools/binary-analysis.html).
+
+#### **Why** ####
+Open source vulnerabilities aren’t the only security issues that might be lurking in application binaries.
+
+Black Duck Binary Analysis can also detect whether sensitive information like email addresses, authorization tokens, compiler switches, and passwords is exposed, and it identifies when mobile applications request excessive permissions, all of which put your organization and users' personal data at risk.
+
+#### **How** ####
+* Initiate Black Duck Binary Analysis scans using the UI, REST API, or drivers such as [pwn_bdba_scan](https://github.com/0dayinc/pwn/blob/master/bin/pwn_bdba_scan) found within the security automation framework, [PWN](https://github.com/0dayinc/pwn).
+* Import a single BDBA vulnerability CSV results file into DefectDojo leveraging the UI, REST API, or drivers such as [pwn_defectdojo_importscan](https://github.com/0dayInc/pwn/blob/master/bin/pwn_defectdojo_importscan) or [pwn_defectdojo_reimportscan](https://github.com/0dayInc/pwn/blob/master/bin/pwn_defectdojo_reimportscan).
+
+### Sample Scan Data
+Sample Blackduck Binary Analysis scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/blackduck_binary_analysis).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/blackduck_component_risk.md b/docs/content/en/integrations/parsers/file/blackduck_component_risk.md
new file mode 100644
index 00000000000..0a8ff1e7b1e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/blackduck_component_risk.md
@@ -0,0 +1,8 @@
+---
+title: "Blackduck Component Risk"
+toc_hide: true
+---
+Upload the zip file containing the security.csv and files.csv.
+
+### Sample Scan Data
+Sample Blackduck Component Risk scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/blackduck_component_risk).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/brakeman.md b/docs/content/en/integrations/parsers/file/brakeman.md
new file mode 100644
index 00000000000..ca708641383
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/brakeman.md
@@ -0,0 +1,8 @@
+---
+title: "Brakeman Scan"
+toc_hide: true
+---
+Import Brakeman Scanner findings in JSON format.
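+
+For example, a JSON report can be generated with (a sketch; run from the root of the Rails application):
+```shell
+brakeman -f json -o brakeman_report.json
+```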
+
+### Sample Scan Data
+Sample Brakeman Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/brakeman).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/bugcrowd.md b/docs/content/en/integrations/parsers/file/bugcrowd.md
new file mode 100644
index 00000000000..a04076f853e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/bugcrowd.md
@@ -0,0 +1,8 @@
+---
+title: "Bugcrowd"
+toc_hide: true
+---
+Import Bugcrowd results in CSV format.
+
+### Sample Scan Data
+Sample Bugcrowd scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/bugcrowd).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/bundler_audit.md b/docs/content/en/integrations/parsers/file/bundler_audit.md
new file mode 100644
index 00000000000..04d8bceb423
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/bundler_audit.md
@@ -0,0 +1,8 @@
+---
+title: "Bundler-Audit"
+toc_hide: true
+---
+Import the text output generated with `bundle-audit check`.
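+
+For example, redirecting the text output to a file:
+```shell
+bundle-audit check > bundler_audit_report.txt
+```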
+
+### Sample Scan Data
+Sample Bundler-Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/bundler_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/burp.md b/docs/content/en/integrations/parsers/file/burp.md
new file mode 100644
index 00000000000..e299f775b3c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/burp.md
@@ -0,0 +1,44 @@
+---
+title: "Burp XML"
+toc_hide: true
+---
+### File Types
+DefectDojo parser accepts Burp Issue data as an .xml file.
+To parse an HTML file instead, use this method: https://documentation.defectdojo.com/integrations/parsers/file/burp_enterprise/
+
+When the Burp report is generated, **the recommended option is to Base64-encode
+both the request and response fields**, i.e. check the box
+that says "Base64-encode requests and responses". These fields will be
+processed and made available on the 'Finding View' page.
+
+See also: Burp documentation - XML export is described under "Export Issue data". https://portswigger.net/burp/documentation/enterprise/work-with-scan-results/generate-reports
+
+### Acceptable XML Format
+All XML elements are required and will be parsed as strings.
+
+~~~
+<issues burpVersion="x.x.x" exportTime="Example Timestamp">
+  <issue>
+    <serialNumber>exampleSerialNumber</serialNumber>
+    <type>exampleTypeNumber</type>
+    <name>Example Issue Name</name>
+    <host ip="127.0.0.1">http://bwa</host>
+    <path><![CDATA[/example/path]]></path>
+    <location><![CDATA[/example/path]]></location>
+    <severity>Example Severity</severity>
+    <confidence>Firm</confidence>
+    <issueBackground><![CDATA[Example issue background.]]></issueBackground>
+    <remediationBackground><![CDATA[Example remediation background.]]></remediationBackground>
+    <requestresponse>
+      <request base64="true"><![CDATA[base64-encoded request]]></request>
+      <response base64="true"><![CDATA[base64-encoded response]]></response>
+      <responseRedirected>false</responseRedirected>
+    </requestresponse>
+  </issue>
+  ...
+</issues>
+~~~
+
+### Sample Scan Data
+Sample Burp scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/burp).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/burp_api.md b/docs/content/en/integrations/parsers/file/burp_api.md
new file mode 100644
index 00000000000..686e781b043
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/burp_api.md
@@ -0,0 +1,8 @@
+---
+title: "Burp REST API"
+toc_hide: true
+---
+Import Burp REST API scan data in JSON format (/scan/[task_id] endpoint).
+
+### Sample Scan Data
+Sample Burp REST API scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/burp_api).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/burp_enterprise.md b/docs/content/en/integrations/parsers/file/burp_enterprise.md
new file mode 100644
index 00000000000..a328ac1b135
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/burp_enterprise.md
@@ -0,0 +1,13 @@
+---
+title: "Burp Enterprise Scan"
+toc_hide: true
+---
+
+### File Types
+DefectDojo parser accepts a Standard Report as an HTML file. To parse an XML file instead, use this method: https://documentation.defectdojo.com/integrations/parsers/file/burp/
+
+See also Burp documentation for info on how to export a Standard Report:
+https://portswigger.net/burp/documentation/enterprise/work-with-scan-results/generate-reports
+
+### Sample Scan Data
+Sample Burp Enterprise Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/burp_enterprise).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/burp_graphql.md b/docs/content/en/integrations/parsers/file/burp_graphql.md
new file mode 100644
index 00000000000..90d60c4394b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/burp_graphql.md
@@ -0,0 +1,108 @@
+---
+title: "Burp GraphQL"
+toc_hide: true
+---
+Import the JSON data returned from the BurpSuite Enterprise GraphQL API. Append all the
+issues returned to a list and save it as the value for the key "Issues". There is no need
+to filter duplicates; the parser will automatically combine issues with the same name.
+
+Example:
+
+{{< highlight json >}}
+{
+ "Issues": [
+ {
+ "issue_type": {
+ "name": "Cross-site scripting (reflected)",
+ "description_html": "Issue Description",
+ "remediation_html": "Issue Remediation",
+ "vulnerability_classifications_html": "CWE-79: Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')",
+ "references_html": "Cross-site scripting"
+ },
+ "description_html": "Details",
+ "remediation_html": "Remediation Details",
+ "severity": "high",
+ "path": "/burp",
+ "origin": "https://portswigger.net",
+ "evidence": [
+ {
+ "request_index": 0,
+ "request_segments": [
+ {
+ "data_html": "GET"
+ },
+ {
+ "highlight_html": "data"
+ },
+ {
+ "data_html": " HTTP More data"
+ }
+ ]
+ },
+ {
+ "response_index": 0,
+ "response_segments": [
+ {
+ "data_html": "HTTP/2 200 OK "
+ },
+ {
+ "highlight_html": "data"
+ },
+ {
+ "data_html": "More data"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
+{{< /highlight >}}
+
+Example GraphQL query to get issue details:
+
+{{< highlight graphql >}}
+ query Issue ($id: ID!, $serial_num: ID!) {
+ issue(scan_id: $id, serial_number: $serial_num) {
+ issue_type {
+ name
+ description_html
+ remediation_html
+ vulnerability_classifications_html
+ references_html
+ }
+ description_html
+ remediation_html
+ severity
+ path
+ origin
+ evidence {
+ ... on Request {
+ request_index
+ request_segments {
+ ... on DataSegment {
+ data_html
+ }
+ ... on HighlightSegment {
+ highlight_html
+ }
+ }
+ }
+ ... on Response {
+ response_index
+ response_segments {
+ ... on DataSegment {
+ data_html
+ }
+ ... on HighlightSegment {
+ highlight_html
+ }
+ }
+ }
+ }
+ }
+ }
+{{< /highlight >}}
+
+### Sample Scan Data
+Sample Burp GraphQL scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/burp_graphql).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/cargo_audit.md b/docs/content/en/integrations/parsers/file/cargo_audit.md
new file mode 100644
index 00000000000..d56b41200c8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/cargo_audit.md
@@ -0,0 +1,8 @@
+---
+title: "CargoAudit Scan"
+toc_hide: true
+---
+Import the JSON output of a cargo-audit scan report.
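+
+For example, redirecting the JSON output to a file:
+```shell
+cargo audit --json > cargo_audit_report.json
+```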
+
+### Sample Scan Data
+Sample CargoAudit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/cargo_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/checkmarx.md b/docs/content/en/integrations/parsers/file/checkmarx.md
new file mode 100644
index 00000000000..679adf935f9
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/checkmarx.md
@@ -0,0 +1,17 @@
+---
+title: "Checkmarx"
+toc_hide: true
+---
+- `Checkmarx Scan`, `Checkmarx Scan detailed`: XML report from Checkmarx SAST (source code analysis)
+- `Checkmarx OSA`: json report from Checkmarx Open Source Analysis (dependencies analysis)
+
+To generate the OSA report using Checkmarx CLI:
+`./runCxConsole.sh OsaScan -v -CxServer <...> -CxToken <..> -projectName <...> -enableOsa -OsaLocationPath -OsaJson `
+
+That will generate three files, two of which are needed for DefectDojo. Build the file for DefectDojo with the jq utility:
+`jq -s . CxOSAVulnerabilities.json CxOSALibraries.json`
+
+Data for SAST, SCA and KICS are supported.
+
+### Sample Scan Data
+Sample Checkmarx scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkmarx).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/checkov.md b/docs/content/en/integrations/parsers/file/checkov.md
new file mode 100644
index 00000000000..8c45815da14
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/checkov.md
@@ -0,0 +1,52 @@
+---
+title: "Checkov Report"
+toc_hide: true
+---
+### File Types
+DefectDojo parser accepts Checkov scan data as a .JSON file.
+
+JSON files can be created from the Checkov CLI: https://www.checkov.io/2.Basics/CLI%20Command%20Reference.html
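+
+For example (a sketch; point `-d` at your IaC directory):
+```shell
+checkov -d /path/to/iac -o json > checkov_report.json
+```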
+
+### Acceptable JSON Format
+
+~~~
+{
+ "check_type": "terraform",
+ "results": {
+ "passed_checks": [
+ ],
+ "failed_checks": [
+ {
+ "check_id": "CKV_AZURE_41",
+ "check_name": "Ensure the key vault is recoverable",
+ "check_result": {
+ "result": "FAILED"
+ },
+ "code_block": [
+ ],
+ "file_path": "file_path",
+ "file_line_range": [
+ 1,
+ 16
+ ],
+ "resource": "azurerm_key_vault.main",
+ "check_class": "checkov.terraform.checks.resource.azure.KeyvaultRecoveryEnabled",
+ "guideline": "https://docs.bridgecrew.io/docs/ensure-the-key-vault-is-recoverable"
+ },
+ ...
+ ],
+ "skipped_checks": [],
+ "parsing_errors": []
+ },
+ "summary": {
+ "passed": 0,
+ "failed": 2,
+ "skipped": 0,
+ "parsing_errors": 0,
+ "checkov_version": "1.0.467"
+ }
+}
+~~~
+
+### Sample Scan Data
+Sample Checkov scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkov).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/chefinspect.md b/docs/content/en/integrations/parsers/file/chefinspect.md
new file mode 100644
index 00000000000..193dbb17817
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/chefinspect.md
@@ -0,0 +1,11 @@
+---
+title: "Chef Inspect Log"
+toc_hide: true
+---
+Import log output from Chef InSpec: https://github.com/inspec/inspec
+
+### File Types
+DefectDojo parser accepts Chef Inspect log scan data as a .log or .txt file.
+
+### Sample Scan Data
+Sample Chef Inspect logs can be found at https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/chefinspect
diff --git a/docs/content/en/integrations/parsers/file/clair.md b/docs/content/en/integrations/parsers/file/clair.md
new file mode 100644
index 00000000000..235f801ee94
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/clair.md
@@ -0,0 +1,8 @@
+---
+title: "Clair Scan"
+toc_hide: true
+---
+Import JSON reports of Docker image vulnerabilities.
+
+### Sample Scan Data
+Sample Clair Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/clair).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/clair_klar.md b/docs/content/en/integrations/parsers/file/clair_klar.md
new file mode 100644
index 00000000000..05651bf267c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/clair_klar.md
@@ -0,0 +1,9 @@
+---
+title: "Clair Klar Scan"
+toc_hide: true
+---
+Import JSON reports of Docker image vulnerabilities from clair klar
+client.
+
+### Sample Scan Data
+Sample Clair Klar Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/clair_klar).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/cloudsploit.md b/docs/content/en/integrations/parsers/file/cloudsploit.md
new file mode 100644
index 00000000000..8e178efdffa
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/cloudsploit.md
@@ -0,0 +1,8 @@
+---
+title: "Cloudsploit (AquaSecurity)"
+toc_hide: true
+---
+From https://github.com/aquasecurity/cloudsploit. Import the JSON output.
+
+### Sample Scan Data
+Sample Cloudsploit (AquaSecurity) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/cloudsploit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/cobalt.md b/docs/content/en/integrations/parsers/file/cobalt.md
new file mode 100644
index 00000000000..c17f0f662a5
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/cobalt.md
@@ -0,0 +1,8 @@
+---
+title: "Cobalt.io Scan"
+toc_hide: true
+---
+CSV Report
+
+### Sample Scan Data
+Sample Cobalt.io Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/cobalt).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/codechecker.md b/docs/content/en/integrations/parsers/file/codechecker.md
new file mode 100644
index 00000000000..912fdcab269
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/codechecker.md
@@ -0,0 +1,24 @@
+---
+title: "Codechecker Report native"
+toc_hide: true
+---
+Import Codechecker static analyzer report in JSON format: https://codechecker.readthedocs.io/en/latest/
+Report format described here: https://codechecker.readthedocs.io/en/latest/analyzer/user_guide/#parse
+
+You can generate a CodeChecker JSON report with a command like this:
+```shell
+CodeChecker parse /path/to/codechecker/analyzer/output/directory -e json -o /path/to/output/file.json
+```
+
+Before this step, build your project with CodeChecker build-process interception:
+```shell
+CodeChecker log -b "make -j8" -o ./codechecker.log
+```
+
+then analyze it:
+```shell
+CodeChecker analyze ./codechecker.log -o /path/to/codechecker/analyzer/output/directory
+```
+
+### Sample Scan Data
+Sample Codechecker Report native scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/codechecker).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/codeql.md b/docs/content/en/integrations/parsers/file/codeql.md
new file mode 100644
index 00000000000..ed26dc1571f
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/codeql.md
@@ -0,0 +1,12 @@
+---
+title: "CodeQL"
+toc_hide: true
+---
+CodeQL can be used to generate a SARIF report that can be imported into DefectDojo:
+
+```shell
+codeql database analyze db python-security-and-quality.qls --sarif-add-snippets --format=sarif-latest --output=security-extended.sarif
+```
+
+The same can be achieved by running the CodeQL GitHub action with the `add-snippets` property set to true.
+
diff --git a/docs/content/en/integrations/parsers/file/contrast.md b/docs/content/en/integrations/parsers/file/contrast.md
new file mode 100644
index 00000000000..bf667bc7bd0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/contrast.md
@@ -0,0 +1,8 @@
+---
+title: "Contrast Scanner"
+toc_hide: true
+---
+CSV Report
+
+### Sample Scan Data
+Sample Contrast Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/contrast).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/coverity_api.md b/docs/content/en/integrations/parsers/file/coverity_api.md
new file mode 100644
index 00000000000..8d72942a292
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/coverity_api.md
@@ -0,0 +1,16 @@
+---
+title: "Coverity API"
+toc_hide: true
+---
+Export Coverity API view data in JSON format (`/api/viewContents/issues` endpoint).
+
+Currently these columns are mandatory:
+ * `displayType` (`Type` in the UI)
+ * `displayImpact` (`Impact` in the UI)
+ * `status` (`Status` in the UI)
+ * `firstDetected` (`First Detected` in the UI)
+
+Other supported attributes: `cwe`, `displayFile`, `occurrenceCount` and `firstDetected`
+
+### Sample Scan Data
+Sample Coverity API scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/coverity_api).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/crashtest_security.md b/docs/content/en/integrations/parsers/file/crashtest_security.md
new file mode 100644
index 00000000000..cce1b524cf6
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/crashtest_security.md
@@ -0,0 +1,8 @@
+---
+title: "Crashtest Security"
+toc_hide: true
+---
+Import a JSON report, or an XML report in JUnit format.
+
+### Sample Scan Data
+Sample Crashtest Security scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/crashtest_security).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/cred_scan.md b/docs/content/en/integrations/parsers/file/cred_scan.md
new file mode 100644
index 00000000000..7a52a74b141
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/cred_scan.md
@@ -0,0 +1,8 @@
+---
+title: "CredScan Report"
+toc_hide: true
+---
+Import CSV credential scanner reports
+
+### Sample Scan Data
+Sample CredScan Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/cred_scan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/cyclonedx.md b/docs/content/en/integrations/parsers/file/cyclonedx.md
new file mode 100644
index 00000000000..d0d6a4e61a2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/cyclonedx.md
@@ -0,0 +1,31 @@
+---
+title: "CycloneDX"
+toc_hide: true
+---
+CycloneDX is a lightweight software bill of materials (SBOM) standard designed for use in application security contexts and supply chain component analysis.
+
+From: https://www.cyclonedx.org/
+
+Example with Anchore Grype:
+
+{{< highlight bash >}}
+./grype defectdojo/defectdojo-django:1.13.1 -o cyclonedx > report.xml
+{{< /highlight >}}
+
+Example with `cyclonedx-bom` tool:
+
+{{< highlight bash >}}
+pip install cyclonedx-bom
+cyclonedx-py
+{{< /highlight >}}
+
+{{< highlight bash >}}
+ Usage: cyclonedx-py [OPTIONS]
+ Options:
+ -i - the alternate filename to a frozen requirements.txt
+ -o - the bom file to create
+ -j - generate JSON instead of XML
+{{< /highlight >}}
+
+### Sample Scan Data
+Sample CycloneDX scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/cyclonedx).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dawnscanner.md b/docs/content/en/integrations/parsers/file/dawnscanner.md
new file mode 100644
index 00000000000..bc3682cf9a8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dawnscanner.md
@@ -0,0 +1,8 @@
+---
+title: "DawnScanner"
+toc_hide: true
+---
+Import report in JSON generated with -j option
+
+### Sample Scan Data
+Sample DawnScanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dawnscanner).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dependency_check.md b/docs/content/en/integrations/parsers/file/dependency_check.md
new file mode 100644
index 00000000000..ddc631a1279
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dependency_check.md
@@ -0,0 +1,13 @@
+---
+title: "Dependency Check"
+toc_hide: true
+---
+OWASP Dependency Check output can be imported in XML format. This parser ingests the vulnerable dependencies and inherits the suppressions (an example command is shown after the list below).
+
+* Suppressed vulnerabilities are tagged with the tag: `suppressed`.
+* Suppressed vulnerabilities are marked as mitigated.
+* If the suppression is missing any `<notes>` tag, it tags them as `no_suppression_document`.
+* Related vulnerable dependencies are tagged with the `related` tag.
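+
+For example, an XML report can be generated with the OWASP Dependency-Check CLI (a sketch; project name and paths are placeholders):
+```shell
+dependency-check --project myapp --scan /path/to/project --format XML --out .
+```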
+
+### Sample Scan Data
+Sample Dependency Check scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dependency_check).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dependency_track.md b/docs/content/en/integrations/parsers/file/dependency_track.md
new file mode 100644
index 00000000000..147d0afe4b5
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dependency_track.md
@@ -0,0 +1,14 @@
+---
+title: "Dependency Track"
+toc_hide: true
+---
+Dependency Track has implemented a DefectDojo integration. Information about
+how to configure the integration is documented here:
+https://docs.dependencytrack.org/integrations/defectdojo/
+
+Alternatively, the Finding Packaging Format (FPF) from OWASP Dependency Track can be
+imported in JSON format. See here for more info on this JSON format:
+https://docs.dependencytrack.org/integrations/file-formats/
+
+### Sample Scan Data
+Sample Dependency Track scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dependency_track).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/detect_secrets.md b/docs/content/en/integrations/parsers/file/detect_secrets.md
new file mode 100644
index 00000000000..b9a54199389
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/detect_secrets.md
@@ -0,0 +1,8 @@
+---
+title: "Detect-secrets"
+toc_hide: true
+---
+Import of JSON report from https://github.com/Yelp/detect-secrets.
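+
+For example (detect-secrets writes its JSON output to stdout):
+```shell
+detect-secrets scan > detect_secrets_report.json
+```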
+
+### Sample Scan Data
+Sample Detect-secrets scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/detect_secrets).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dockerbench.md b/docs/content/en/integrations/parsers/file/dockerbench.md
new file mode 100644
index 00000000000..f4f2840fa75
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dockerbench.md
@@ -0,0 +1,9 @@
+---
+title: "docker-bench-security Scanner"
+toc_hide: true
+---
+Import JSON reports of [docker-bench-security](https://github.com/docker/docker-bench-security).
+docker-bench-security is a script that runs checks based on the [CIS Docker Benchmark](https://www.cisecurity.org/benchmark/docker/).
+
+### Sample Scan Data
+Sample docker-bench-security Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dockerbench).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dockle.md b/docs/content/en/integrations/parsers/file/dockle.md
new file mode 100644
index 00000000000..b3944b174da
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dockle.md
@@ -0,0 +1,9 @@
+---
+title: "Dockle Report"
+toc_hide: true
+---
+Import JSON reports from the [Dockle](https://github.com/goodwithtech/dockle) container image linter.
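+
+For example (a sketch; replace the image reference with your own):
+```shell
+dockle -f json -o dockle_report.json example/image:latest
+```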
+
+
+### Sample Scan Data
+Sample Dockle Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dockle).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/drheader.md b/docs/content/en/integrations/parsers/file/drheader.md
new file mode 100644
index 00000000000..26789703c9f
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/drheader.md
@@ -0,0 +1,9 @@
+---
+title: "DrHeader"
+toc_hide: true
+---
+Import of JSON report from https://github.com/Santandersecurityresearch/DrHeader.
+
+
+### Sample Scan Data
+Sample DrHeader scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/drheader).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/dsop.md b/docs/content/en/integrations/parsers/file/dsop.md
new file mode 100644
index 00000000000..cbee05be1b1
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/dsop.md
@@ -0,0 +1,8 @@
+---
+title: "DSOP Scan"
+toc_hide: true
+---
+Import XLSX findings from DSOP vulnerability scan pipelines.
+
+### Sample Scan Data
+Sample DSOP Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/dsop).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/edgescan.md b/docs/content/en/integrations/parsers/file/edgescan.md
new file mode 100644
index 00000000000..aca05133a74
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/edgescan.md
@@ -0,0 +1,6 @@
+---
+title: "Edgescan"
+toc_hide: true
+---
+Import Edgescan vulnerabilities by JSON file or [API - no file required](../../api/edgescan.md)
+
diff --git a/docs/content/en/integrations/parsers/file/eslint.md b/docs/content/en/integrations/parsers/file/eslint.md
new file mode 100644
index 00000000000..8bf3dbcafa0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/eslint.md
@@ -0,0 +1,8 @@
+---
+title: "ESLint"
+toc_hide: true
+---
+ESLint JSON report format (`-f json`).
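+
+For example:
+```shell
+eslint . -f json -o eslint_report.json
+```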
+
+### Sample Scan Data
+Sample ESLint scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/eslint).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/fortify.md b/docs/content/en/integrations/parsers/file/fortify.md
new file mode 100644
index 00000000000..bbd44f4fff3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/fortify.md
@@ -0,0 +1,8 @@
+---
+title: "Fortify"
+toc_hide: true
+---
+Import Findings from XML file format.
+
+### Sample Scan Data
+Sample Fortify scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/fortify).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gcloud_artifact_scan.md b/docs/content/en/integrations/parsers/file/gcloud_artifact_scan.md
new file mode 100644
index 00000000000..cb752af29c5
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gcloud_artifact_scan.md
@@ -0,0 +1,12 @@
+---
+title: "Google Cloud Artifact Vulnerability Scan"
+toc_hide: true
+---
+Google Cloud has an Artifact Registry for which security scans can be enabled: https://cloud.google.com/artifact-registry/docs/analysis
+Once a scan is complete, results can be pulled via the API or gcloud (https://cloud.google.com/artifact-analysis/docs/metadata-storage) and exported to JSON.
+
+### File Types
+DefectDojo parser accepts Google Cloud Artifact Vulnerability Scan data as a .json file.
+
+### Sample Scan Data
+Sample reports can be found at https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gcloud_artifact_scan
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/generic.md b/docs/content/en/integrations/parsers/file/generic.md
new file mode 100644
index 00000000000..36e90ab6557
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/generic.md
@@ -0,0 +1,115 @@
+---
+title: "Generic Findings Import"
+toc_hide: true
+---
+Import Generic findings in CSV or JSON format.
+
+Attributes supported for CSV:
+- Date: Date of the finding in mm/dd/yyyy format.
+- Title: Title of the finding.
+- CweId: CWE identifier; must be an integer value.
+- Url: URL associated with the finding.
+- Severity: Severity of the finding. Must be one of Info, Low, Medium, High, or Critical.
+- Description: Description of the finding. Can be multiple lines if enclosed in double quotes.
+- Mitigation: Possible mitigations for the finding. Can be multiple lines if enclosed in double quotes.
+- Impact: Detailed impact of the finding. Can be multiple lines if enclosed in double quotes.
+- References: References associated with the finding. Can be multiple lines if enclosed in double quotes.
+- Active: Indicator if the finding is active. Must be empty, TRUE, or FALSE.
+- Verified: Indicator if the finding has been verified. Must be empty, TRUE, or FALSE.
+- FalsePositive: Indicator if the finding is a false positive. Must be TRUE or FALSE.
+- Duplicate: Indicator if the finding is a duplicate. Must be TRUE or FALSE.
+
+The CSV expects a header row with the names of the attributes.
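+
+A minimal CSV sketch with the header row and a single finding (values are illustrative):
+```csv
+Date,Title,CweId,Url,Severity,Description,Mitigation,Impact,References,Active,Verified,FalsePositive,Duplicate
+01/06/2021,Example finding,79,https://example.com/page,Low,"Example description",Example mitigation,Example impact,Example references,TRUE,FALSE,FALSE,FALSE
+```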
+
+Example of JSON format:
+
+```JSON
+{
+ "findings": [
+ {
+ "title": "test title with endpoints as dict",
+ "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau",
+ "severity": "Medium",
+ "mitigation": "Some mitigation",
+ "date": "2021-01-06",
+ "cve": "CVE-2020-36234",
+ "cwe": 261,
+ "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "file_path": "src/first.cpp",
+ "line": 13,
+ "endpoints": [
+ {
+ "host": "exemple.com"
+ }
+ ]
+ },
+ {
+ "title": "test title with endpoints as strings",
+ "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2",
+ "severity": "Critical",
+ "mitigation": "Some mitigation",
+ "date": "2021-01-06",
+ "cve": "CVE-2020-36235",
+ "cwe": 287,
+ "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "file_path": "src/two.cpp",
+ "line": 135,
+ "endpoints": [
+ "http://urlfiltering.paloaltonetworks.com/test-command-and-control",
+ "https://urlfiltering.paloaltonetworks.com:2345/test-pest"
+ ]
+ },
+ {
+ "title": "test title",
+ "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2",
+ "severity": "Critical",
+ "mitigation": "Some mitigation",
+ "date": "2021-01-06",
+ "cve": "CVE-2020-36236",
+ "cwe": 287,
+ "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "file_path": "src/threeeeeeeeee.cpp",
+ "line": 1353
+ }
+ ]
+}
+```
+
+This parser supports a `files` attribute that accepts files as Base64-encoded strings. These files are attached to the respective findings.
+
+Example:
+
+```JSON
+{
+ "name": "My wonderful report",
+ "findings": [
+ {
+ "title": "Vuln with image",
+ "description": "Some very long description",
+ "severity": "Medium",
+ "files": [
+ {
+ "title": "Screenshot from 2017-04-10 16-54-19.png",
+ "data": "iVBORw0KGgoAAAANSUhEUgAABWgAAAK0CAIAAAARSkPJAAAAA3N<...>TkSuQmCC"
+ }
+ ]
+ }
+ ]
+}
+```
+
+This parser supports the attributes `name` and `type`, which make it possible to define the `TestType`. Based on this, you can define custom `HASHCODE_FIELDS` or `DEDUPLICATION_ALGORITHM` in the settings.
+
+Example:
+
+```JSON
+{
+ "name": "My wonderful report",
+ "type": "My custom Test type",
+ "findings": [
+ ]
+}
+```
+
+### Sample Scan Data
+Sample Generic Findings Import scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/generic).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ggshield.md b/docs/content/en/integrations/parsers/file/ggshield.md
new file mode 100644
index 00000000000..4f106162e5e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ggshield.md
@@ -0,0 +1,8 @@
+---
+title: "Ggshield"
+toc_hide: true
+---
+Import [Ggshield](https://github.com/GitGuardian/ggshield) findings in JSON format.
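+
+For example (a sketch, assuming the ggshield CLI with JSON output selected via `--json`):
+```shell
+ggshield secret scan repo . --json > ggshield_report.json
+```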
+
+### Sample Scan Data
+Sample Ggshield scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ggshield).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/github_vulnerability.md b/docs/content/en/integrations/parsers/file/github_vulnerability.md
new file mode 100644
index 00000000000..8e4f3a8222e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/github_vulnerability.md
@@ -0,0 +1,214 @@
+---
+title: "Github Vulnerability"
+toc_hide: true
+---
+Import findings from a Github vulnerability scan (GraphQL query):
+https://docs.github.com/en/graphql/reference/objects#repositoryvulnerabilityalert
+
+Currently the parser is only able to handle the `RepositoryVulnerabilityAlert` object.
+The parser has a search feature that detects the relevant data in the report.
+
+Here are the mandatory objects and attributes:
+```
+vulnerabilityAlerts (RepositoryVulnerabilityAlert object)
+ + id
+ + createdAt (optional)
+ + vulnerableManifestPath
+ + state (optional)
+ + securityVulnerability (SecurityVulnerability object)
+ + severity (CRITICAL/HIGH/LOW/MODERATE)
+ + package (optional)
+ + name (optional)
+ + advisory (SecurityAdvisory object)
+ + description
+ + summary
+ + description
+ + identifiers
+ + value
+ + references (optional)
+ + url (optional)
+ + cvss (optional)
+ + score (optional)
+ + vectorString (optional)
+ + cwes (optional)
+```
+
+References:
+ - https://docs.github.com/en/graphql/reference/objects#repositoryvulnerabilityalert
+ - https://docs.github.com/en/graphql/reference/objects#securityvulnerability
+
+GitHub v4 GraphQL query to fetch data, with extended information such as the repository name and URL, and the alert number:
+
+{{< highlight graphql >}}
+query getVulnerabilitiesByRepoAndOwner($name: String!, $owner: String!) {
+ repository(name: $name, owner: $owner) {
+ vulnerabilityAlerts(first: 100, after:AFTER, states: OPEN) {
+ nodes {
+ id
+ createdAt
+ vulnerableManifestPath
+ securityVulnerability {
+ severity
+ updatedAt
+ package {
+ name
+ ecosystem
+ }
+ firstPatchedVersion {
+ identifier
+ }
+ vulnerableVersionRange
+ advisory {
+ description
+ summary
+ identifiers {
+ value
+ type
+ }
+ references {
+ url
+ }
+ cvss {
+ vectorString
+ }
+ }
+ }
+ vulnerableManifestPath
+ state
+ vulnerableManifestFilename
+ vulnerableRequirements
+ number
+ dependencyScope
+ dismissComment
+ dismissReason
+ dismissedAt
+ fixedAt
+ }
+ totalCount
+ pageInfo {
+ endCursor
+ hasNextPage
+ hasPreviousPage
+ startCursor
+ }
+ }
+ nameWithOwner
+ url
+ }
+}
+{{< /highlight >}}
+
+Another example: a Python script with a function that queries any repository, supports
+paginated responses, and fetches all findings. It filters for OPEN Dependabot alerts, but this filter can be removed from the GraphQL query.
+
+```python
+import json
+import os
+
+import requests
+
+# GitHub's GraphQL API expects an Authorization header like "bearer <token>";
+# here the token is assumed to be provided via an environment variable.
+AUTH_TOKEN = "bearer " + os.environ["GITHUB_TOKEN"]
+
+def make_query(after_cursor=None):
+ return """
+query getVulnerabilitiesByRepoAndOwner($name: String!, $owner: String!) {
+ repository(name: $name, owner: $owner) {
+ vulnerabilityAlerts(first: 100, after:AFTER, states: OPEN) {
+ nodes {
+ id
+ createdAt
+ vulnerableManifestPath
+ securityVulnerability {
+ severity
+ updatedAt
+ package {
+ name
+ ecosystem
+ }
+ firstPatchedVersion {
+ identifier
+ }
+ vulnerableVersionRange
+ advisory {
+ description
+ summary
+ identifiers {
+ value
+ type
+ }
+ references {
+ url
+ }
+ cvss {
+ vectorString
+ }
+ }
+ }
+ vulnerableManifestPath
+ state
+ vulnerableManifestFilename
+ vulnerableRequirements
+ number
+ dependencyScope
+ dismissComment
+ dismissReason
+ dismissedAt
+ fixedAt
+ }
+ totalCount
+ pageInfo {
+ endCursor
+ hasNextPage
+ hasPreviousPage
+ startCursor
+ }
+ }
+ nameWithOwner
+ url
+ }
+}
+""".replace(
+ "AFTER", '"{}"'.format(after_cursor) if after_cursor else "null"
+ )
+
+# accumulates all pages data into a single object
+def get_dependabot_alerts_repository(repo, owner):
+ keep_fetching = True
+ after_cursor = None
+ output_result = {"data": {"repository": {"vulnerabilityAlerts": {"nodes": []}}}}
+ while keep_fetching:
+ headers = {"Authorization": AUTH_TOKEN}
+
+ request = requests.post(
+ url="https://api.github.com/graphql",
+ json={
+ "operationName": "getVulnerabilitiesByRepoAndOwner",
+ "query": make_query(after_cursor),
+ "variables": {"name": repo, "owner": owner},
+ },
+ headers=headers,
+ )
+
+ result = request.json()
+        # the GraphQL query selects nameWithOwner rather than name
+        output_result["data"]["repository"]["name"] = result["data"]["repository"][
+            "nameWithOwner"
+        ]
+ output_result["data"]["repository"]["url"] = result["data"]["repository"]["url"]
+ if result["data"]["repository"]["vulnerabilityAlerts"]["totalCount"] == 0:
+ return None
+
+ output_result["data"]["repository"]["vulnerabilityAlerts"]["nodes"] += result[
+ "data"
+ ]["repository"]["vulnerabilityAlerts"]["nodes"]
+
+ keep_fetching = result["data"]["repository"]["vulnerabilityAlerts"]["pageInfo"][
+ "hasNextPage"
+ ]
+ after_cursor = result["data"]["repository"]["vulnerabilityAlerts"]["pageInfo"][
+ "endCursor"
+ ]
+ print(
+ "Fetched {} alerts for repo {}/{}".format(
+ result["data"]["repository"]["vulnerabilityAlerts"]["totalCount"],
+ owner,
+ repo,
+ )
+ )
+ return json.dumps(output_result, indent=2)
+```
+
+### Sample Scan Data
+Sample Github Vulnerability scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/github_vulnerability).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_api_fuzzing.md b/docs/content/en/integrations/parsers/file/gitlab_api_fuzzing.md
new file mode 100644
index 00000000000..9ef8535dace
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_api_fuzzing.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab API Fuzzing Report Scan"
+toc_hide: true
+---
+GitLab API Fuzzing report files can be imported in JSON format (option --json).
+
+### Sample Scan Data
+Sample GitLab API Fuzzing Report Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_api_fuzzing).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_container_scan.md b/docs/content/en/integrations/parsers/file/gitlab_container_scan.md
new file mode 100644
index 00000000000..5ff26c7573a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_container_scan.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab Container Scan"
+toc_hide: true
+---
+GitLab Container Scan report file can be imported in JSON format (option --json)
+
+### Sample Scan Data
+Sample GitLab Container Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_container_scan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_dast.md b/docs/content/en/integrations/parsers/file/gitlab_dast.md
new file mode 100644
index 00000000000..b3abcfcc8a4
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_dast.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab DAST Report"
+toc_hide: true
+---
+GitLab DAST Report in JSON format (option --json)
+
+### Sample Scan Data
+Sample GitLab DAST Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_dast).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_dep_scan.md b/docs/content/en/integrations/parsers/file/gitlab_dep_scan.md
new file mode 100644
index 00000000000..bb5e9bfe30b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_dep_scan.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab Dependency Scanning Report"
+toc_hide: true
+---
+Import Dependency Scanning Report vulnerabilities in JSON format: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#reports-json-format
+
+### Sample Scan Data
+Sample GitLab Dependency Scanning Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_dep_scan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_sast.md b/docs/content/en/integrations/parsers/file/gitlab_sast.md
new file mode 100644
index 00000000000..e592da480a4
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_sast.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab SAST Report"
+toc_hide: true
+---
+Import SAST Report vulnerabilities in JSON format: https://docs.gitlab.com/ee/user/application_security/sast/#reports-json-format
+
+### Sample Scan Data
+Sample GitLab SAST Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_sast).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitlab_secret_detection_report.md b/docs/content/en/integrations/parsers/file/gitlab_secret_detection_report.md
new file mode 100644
index 00000000000..f3a0d2dc99a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitlab_secret_detection_report.md
@@ -0,0 +1,8 @@
+---
+title: "GitLab Secret Detection Report"
+toc_hide: true
+---
+GitLab Secret Detection Report file can be imported in JSON format (option --json).
+
+### Sample Scan Data
+Sample GitLab Secret Detection Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitlab_secret_detection_report).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gitleaks.md b/docs/content/en/integrations/parsers/file/gitleaks.md
new file mode 100644
index 00000000000..00b067e4677
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gitleaks.md
@@ -0,0 +1,8 @@
+---
+title: "Gitleaks"
+toc_hide: true
+---
+Import Gitleaks findings in JSON format.
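+
+For example, with Gitleaks v8 (flags may differ in older versions):
+```shell
+gitleaks detect --source . --report-format json --report-path gitleaks_report.json
+```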
+
+### Sample Scan Data
+Sample Gitleaks scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gitleaks).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/gosec.md b/docs/content/en/integrations/parsers/file/gosec.md
new file mode 100644
index 00000000000..fbe5bcbf2e2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/gosec.md
@@ -0,0 +1,8 @@
+---
+title: "Gosec Scanner"
+toc_hide: true
+---
+Import Gosec Scanner findings in JSON format.
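+
+For example (run from the root of the Go module):
+```shell
+gosec -fmt json -out gosec_report.json ./...
+```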
+
+### Sample Scan Data
+Sample Gosec Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/gosec).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/govulncheck.md b/docs/content/en/integrations/parsers/file/govulncheck.md
new file mode 100644
index 00000000000..8637fc2a429
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/govulncheck.md
@@ -0,0 +1,8 @@
+---
+title: "Govulncheck"
+toc_hide: true
+---
+JSON vulnerability report generated by the govulncheck tool, using a command like `govulncheck -json . > report.json`.
+
+### Sample Scan Data
+Sample Govulncheck scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/govulncheck).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/h1.md b/docs/content/en/integrations/parsers/file/h1.md
new file mode 100644
index 00000000000..da01131f9c1
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/h1.md
@@ -0,0 +1,8 @@
+---
+title: "HackerOne Cases"
+toc_hide: true
+---
+Import HackerOne case findings in JSON format.
+
+### Sample Scan Data
+Sample HackerOne Cases scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/h1).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/hadolint.md b/docs/content/en/integrations/parsers/file/hadolint.md
new file mode 100644
index 00000000000..ccc60f7b637
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/hadolint.md
@@ -0,0 +1,8 @@
+---
+title: "Hadolint"
+toc_hide: true
+---
+Hadolint Dockerfile scan in JSON format.
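+
+For example:
+```shell
+hadolint -f json Dockerfile > hadolint_report.json
+```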
+
+### Sample Scan Data
+Sample Hadolint scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/hadolint).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/harbor_vulnerability.md b/docs/content/en/integrations/parsers/file/harbor_vulnerability.md
new file mode 100644
index 00000000000..33878003bd0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/harbor_vulnerability.md
@@ -0,0 +1,9 @@
+---
+title: "Harbor Vulnerability"
+toc_hide: true
+---
+Import findings from a Harbor registry container scan: https://github.com/goharbor/harbor
+
+
+### Sample Scan Data
+Sample Harbor Vulnerability scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/harbor_vulnerability).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/hcl_appscan.md b/docs/content/en/integrations/parsers/file/hcl_appscan.md
new file mode 100644
index 00000000000..2a837039553
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/hcl_appscan.md
@@ -0,0 +1,8 @@
+---
+title: "HCL Appscan"
+toc_hide: true
+---
+HCL AppScan can export results in PDF, XML, and CSV formats from within the portal. However, this parser only supports the import of XML generated by HCL AppScan on Cloud.
+
+### Sample Scan Data
+Sample HCL Appscan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/hcl_appscan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/horusec.md b/docs/content/en/integrations/parsers/file/horusec.md
new file mode 100644
index 00000000000..b347bef33e0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/horusec.md
@@ -0,0 +1,15 @@
+---
+title: "Horusec"
+toc_hide: true
+---
+Import findings from Horusec scan.
+
+```shell
+./horusec_linux_x64 start -O=report.json -o json -i="tests/"
+```
+
+References:
+ * [GitHub repository](https://github.com/ZupIT/horusec)
+
+### Sample Scan Data
+Sample Horusec scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/horusec).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/humble.md b/docs/content/en/integrations/parsers/file/humble.md
new file mode 100644
index 00000000000..e2e4faaec80
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/humble.md
@@ -0,0 +1,9 @@
+---
+title: "Humble Report"
+toc_hide: true
+---
+Import the JSON report of the Humble scanner: https://github.com/rfc-st/humble
+
+
+### Sample Scan Data
+Sample Humble Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/humble).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/huskyci.md b/docs/content/en/integrations/parsers/file/huskyci.md
new file mode 100644
index 00000000000..660e00505b4
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/huskyci.md
@@ -0,0 +1,9 @@
+---
+title: "HuskyCI Report"
+toc_hide: true
+---
+Import JSON reports from [HuskyCI](https://github.com/globocom/huskyCI).
+
+### Sample Scan Data
+Sample HuskyCI Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/huskyci).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/hydra.md b/docs/content/en/integrations/parsers/file/hydra.md
new file mode 100644
index 00000000000..abd5a644d89
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/hydra.md
@@ -0,0 +1,43 @@
+---
+title: "Hydra"
+toc_hide: true
+---
+Import JSON reports from [THC Hydra](https://github.com/vanhauser-thc/thc-hydra).
+
+Hydra can discover weak login credentials on different types of services (e.g. RDP).
+
+As Hydra cannot provide a severity rating (as it doesn't know how severe a weak login is at this scanned service), all imported findings will be rated 'High'.
+
+Sample JSON report:
+```json
+{
+ "errormessages": [
+ "[ERROR] Error Message of Something",
+ "[ERROR] Another Message",
+ "These are very free form"
+ ],
+ "generator": {
+ "built": "2019-03-01 14:44:22",
+ "commandline": "hydra -b jsonv1 -o results.json ... ...",
+ "jsonoutputversion": "1.00",
+ "server": "127.0.0.1",
+ "service": "http-post-form",
+ "software": "Hydra",
+ "version": "v8.5"
+ },
+ "quantityfound": 1,
+ "results": [
+ {
+ "host": "127.0.0.1",
+ "login": "bill@example.com",
+ "password": "bill",
+ "port": 9999,
+ "service": "http-post-form"
+ }
+ ],
+ "success": false
+}
+```
+
+### Sample Scan Data
+Sample Hydra scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/hydra).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ibm_app.md b/docs/content/en/integrations/parsers/file/ibm_app.md
new file mode 100644
index 00000000000..71ffd51815a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ibm_app.md
@@ -0,0 +1,8 @@
+---
+title: "IBM AppScan DAST"
+toc_hide: true
+---
+XML report file from IBM AppScan.
+
+### Sample Scan Data
+Sample IBM AppScan DAST scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ibm_app).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/immuniweb.md b/docs/content/en/integrations/parsers/file/immuniweb.md
new file mode 100644
index 00000000000..6ab2cd139ad
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/immuniweb.md
@@ -0,0 +1,8 @@
+---
+title: "Immuniweb Scan"
+toc_hide: true
+---
+XML Scan Result File from Immuniweb Scan.
+
+### Sample Scan Data
+Sample Immuniweb Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/immuniweb).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/intsights.md b/docs/content/en/integrations/parsers/file/intsights.md
new file mode 100644
index 00000000000..64b6e58860e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/intsights.md
@@ -0,0 +1,65 @@
+---
+title: "IntSights Report"
+toc_hide: true
+---
+IntSights Threat Command is a commercial Threat Intelligence platform that monitors both the open and dark web to identify threats for the Assets you care about (Domain Names, IP addresses, Brand Names, etc.).
+
+##### Manual Import
+Use the Export CSV feature in the IntSights Threat Command GUI to create an *IntSights Alerts.csv* file. This CSV
+file can then be imported into Defect Dojo.
+
+##### Automated Import
+
+The IntSights `get-complete-alert` API only returns details for a single alert. To automate the process,
+individually fetch details for each alert and append to a list. The list is then saved as the value for the key
+"Alerts". This JSON object can then be imported into Defect Dojo.
+
+Example:
+
+ {
+ "Alerts":[
+ {
+ "_id":"5c80egf83b4a3900078b6be6",
+ "Details":{
+ "Source":{
+ "URL":"https://www.htbridge.com/websec/?id=ABCDEF",
+ "Date":"2018-03-08T00:01:02.622Z",
+ "Type":"Other",
+ "NetworkType":"ClearWeb"
+ },
+ "Images":[
+ "5c80egf833963a40007e01e8d",
+ "5c80egf833b4a3900078b6bea",
+ "5c80egf834626bd0007bd64db"
+ ],
+ "Title":"HTTP headers weakness in example.com web server",
+ "Tags":[],
+ "Type":"ExploitableData",
+ "Severity":"Critical",
+ "SubType":"VulnerabilityInTechnologyInUse",
+ "Description":"X-XSS-PROTECTION and CONTENT-SECURITY-POLICY headers were not sent by the server, which makes it vulnerable for various attack vectors"
+ },
+ "Assignees":[
+ "5c3c8f99903dfd0006ge5e61"
+ ],
+ "FoundDate":"2018-03-08T00:01:02.622Z",
+ "Assets":[
+ {
+ "Type":"Domains",
+ "Value":"example.com"
+ }
+ ],
+ "TakedownStatus":"NotSent",
+ "IsFlagged":false,
+ "UpdateDate":"2018-03-08T00:01:02.622Z",
+ "RelatedIocs":[],
+ "RelatedThreatIDs":[],
+ "Closed":{
+ "IsClosed":false
+ }
+ }
+ ]
+ }
+
+### Sample Scan Data
+Sample IntSights Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/intsights).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/jfrog_xray_api_summary_artifact.md b/docs/content/en/integrations/parsers/file/jfrog_xray_api_summary_artifact.md
new file mode 100644
index 00000000000..e99f5f4da61
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/jfrog_xray_api_summary_artifact.md
@@ -0,0 +1,13 @@
+---
+title: "JFrog Xray API Summary Artifact Scan"
+toc_hide: true
+---
+
+### File Types
+Accepts a JSON file generated from the JFrog Artifact Summary API call.
+
+### Sample Scan Data / Unit Tests
+Sample JFrog Xray API Summary Artifact Scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/jfrog_xray_api_summary_artifact).
+
+### Link To Tool
+See JFrog Documentation: https://jfrog.com/help/r/jfrog-rest-apis/summary
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/jfrog_xray_on_demand_binary_scan.md b/docs/content/en/integrations/parsers/file/jfrog_xray_on_demand_binary_scan.md
new file mode 100644
index 00000000000..438bf065a39
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/jfrog_xray_on_demand_binary_scan.md
@@ -0,0 +1,12 @@
+---
+title: "JFrog Xray On Demand Binary Scan"
+toc_hide: true
+---
+Import the JSON format for the "JFrog Xray On Demand Binary Scan" file. Use this importer for Xray version 3.X.
+
+JFrog file documentation:
+
+https://jfrog.com/help/r/jfrog-cli/on-demand-binary-scan
+
+### Sample Scan Data
+Sample JFrog Xray On Demand Binary Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/jfrog_xray_on_demand_binary_scan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/jfrog_xray_unified.md b/docs/content/en/integrations/parsers/file/jfrog_xray_unified.md
new file mode 100644
index 00000000000..b8b55db0e79
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/jfrog_xray_unified.md
@@ -0,0 +1,8 @@
+---
+title: "JFrog XRay Unified"
+toc_hide: true
+---
+Import the JSON format for the "Security & Compliance | Reports" export. JFrog's Xray tool is an add-on to their Artifactory repository that does Software Composition Analysis; see https://www.jfrog.com/confluence/display/JFROG/JFrog+Xray for more information. "Xray Unified" refers to Xray Version 3.0 and later.
+
+### Sample Scan Data
+Sample JFrog XRay Unified scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/jfrog_xray_unified).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/jfrogxray.md b/docs/content/en/integrations/parsers/file/jfrogxray.md
new file mode 100644
index 00000000000..c3cb126fa20
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/jfrogxray.md
@@ -0,0 +1,8 @@
+---
+title: "JFrogXRay"
+toc_hide: true
+---
+Import the JSON format for the "Security Export" file. Use this importer for Xray version 2.X.
+
+### Sample Scan Data
+Sample JFrogXRay scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/jfrogxray).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kics.md b/docs/content/en/integrations/parsers/file/kics.md
new file mode 100644
index 00000000000..370421cce84
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kics.md
@@ -0,0 +1,8 @@
+---
+title: "KICS Scanner"
+toc_hide: true
+---
+Import of JSON report from [KICS](https://github.com/Checkmarx/kics).
+
+### Sample Scan Data
+Sample KICS Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kics).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kiuwan.md b/docs/content/en/integrations/parsers/file/kiuwan.md
new file mode 100644
index 00000000000..00189e87726
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kiuwan.md
@@ -0,0 +1,8 @@
+---
+title: "Kiuwan Scanner"
+toc_hide: true
+---
+Import Kiuwan Scan in CSV format. Export as CSV Results on Kiuwan.
+
+### Sample Scan Data
+Sample Kiuwan Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kiuwan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kubeaudit.md b/docs/content/en/integrations/parsers/file/kubeaudit.md
new file mode 100644
index 00000000000..cefffcbc4a9
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kubeaudit.md
@@ -0,0 +1,15 @@
+---
+title: "Kubeaudit Scan"
+toc_hide: true
+---
+Kubeaudit is a command line tool and a Go package to audit Kubernetes clusters for various security concerns. This parser supports the JSON output of kubeaudit. The tool can be found [here](https://github.com/Shopify/kubeaudit)
+
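+A minimal usage sketch (the `all` subcommand and `-p json` output flag follow the kubeaudit README; verify against `kubeaudit --help` for your version):
+
+```bash
+# Audit all resources and save the JSON report
+kubeaudit all -p json > kubeaudit.json
+```
+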
+### Sample Scan Data
+Sample Kubeaudit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kubeaudit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kubebench.md b/docs/content/en/integrations/parsers/file/kubebench.md
new file mode 100644
index 00000000000..89e1e3c3a6b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kubebench.md
@@ -0,0 +1,15 @@
+---
+title: "kube-bench Scanner"
+toc_hide: true
+---
+Import JSON reports of Kubernetes CIS benchmark scans.
+
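+A minimal sketch of producing such a report (assumes kube-bench is installed on the node being benchmarked; flags may differ between kube-bench versions):
+
+```bash
+# Run the CIS benchmark checks and save the JSON report
+kube-bench run --json > kube-bench.json
+```
+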
+### Sample Scan Data
+Sample kube-bench Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kubebench).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kubehunter.md b/docs/content/en/integrations/parsers/file/kubehunter.md
new file mode 100644
index 00000000000..08f932d5f86
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kubehunter.md
@@ -0,0 +1,15 @@
+---
+title: "kubeHunter Scanner"
+toc_hide: true
+---
+Import JSON reports of kube-hunter scans. Use `kube-hunter --report json` to produce the report in JSON format.
+
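+For example (the remote target is illustrative; see `kube-hunter --help` for other scan modes):
+
+```bash
+# Hunt a remote cluster endpoint and save the JSON report
+kube-hunter --remote 10.0.0.1 --report json > kube-hunter.json
+```
+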
+### Sample Scan Data
+Sample kubeHunter Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kubehunter).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/kubescape.md b/docs/content/en/integrations/parsers/file/kubescape.md
new file mode 100644
index 00000000000..fefb7cd476d
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/kubescape.md
@@ -0,0 +1,17 @@
+---
+title: "Kubescape Scanner"
+toc_hide: true
+---
+Kubescape is an open-source Kubernetes security tool providing a single pane of glass for cluster security, including risk analysis, security compliance, an RBAC visualizer, and image vulnerability scanning. Kubescape scans K8s clusters, YAML files, and Helm charts, detecting misconfigurations according to multiple frameworks (such as NSA-CISA and MITRE ATT&CK®), software vulnerabilities, and RBAC (role-based access control) violations at early stages of the CI/CD pipeline. It calculates risk scores instantly and shows risk trends over time.
+
+The parser supports JSON output files.
+
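+A minimal sketch of generating a compatible report (the framework choice is illustrative):
+
+```bash
+# Scan the current cluster against the NSA framework and save JSON results
+kubescape scan framework nsa --format json --output results.json
+```
+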
+### Sample Scan Data
+Sample Kubescape scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/kubescape).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/mend.md b/docs/content/en/integrations/parsers/file/mend.md
new file mode 100644
index 00000000000..2ec28770586
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/mend.md
@@ -0,0 +1,22 @@
+---
+title: "Mend Scan"
+toc_hide: true
+---
+
+### File Types
+Accepts a JSON file, generated from the Mend* Unified Agent.
+
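+A hedged sketch of running the Unified Agent so that it emits the report (the jar and config file names are the usual defaults, but verify them against your setup):
+
+```bash
+# Run the Mend Unified Agent against the current directory
+java -jar wss-unified-agent.jar -c wss-unified-agent.config -d .
+```
+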
+### Sample Scan Data / Unit Tests
+Unit tests for Mend JSON files can be found at https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mend
+
+### Link To Tool
+See documentation: https://docs.mend.io/bundle/unified_agent/page/example_of_a_unified_agent_json_report.html
+
+*Formerly known as Whitesource.*
diff --git a/docs/content/en/integrations/parsers/file/meterian.md b/docs/content/en/integrations/parsers/file/meterian.md
new file mode 100644
index 00000000000..bf2d3bea8bc
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/meterian.md
@@ -0,0 +1,8 @@
+---
+title: "Meterian Scanner"
+toc_hide: true
+---
+The Meterian JSON report output file can be imported.
+
+### Sample Scan Data
+Sample Meterian Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/meterian).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/microfocus_webinspect.md b/docs/content/en/integrations/parsers/file/microfocus_webinspect.md
new file mode 100644
index 00000000000..e087e4267e8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/microfocus_webinspect.md
@@ -0,0 +1,8 @@
+---
+title: "Microfocus Webinspect Scanner"
+toc_hide: true
+---
+Import XML report
+
+### Sample Scan Data
+Sample Microfocus Webinspect Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/microfocus_webinspect).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/mobsf.md b/docs/content/en/integrations/parsers/file/mobsf.md
new file mode 100644
index 00000000000..44985929fdb
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/mobsf.md
@@ -0,0 +1,17 @@
+---
+title: "MobSF Scanner"
+toc_hide: true
+---
+Export a JSON file using the API endpoint `api/v1/report_json`.
+
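+A hedged sketch of pulling the report over the REST API (host, port, and scan hash are illustrative; the Authorization header carries your MobSF API key):
+
+```bash
+# Download the JSON report for a previously scanned app
+curl -X POST "http://localhost:8000/api/v1/report_json" \
+  -H "Authorization: $MOBSF_API_KEY" \
+  -d "hash=<scan-hash>" -o mobsf_report.json
+```
+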
+### Sample Scan Data
+Sample MobSF Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mobsf).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/mobsfscan.md b/docs/content/en/integrations/parsers/file/mobsfscan.md
new file mode 100644
index 00000000000..7209f80b403
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/mobsfscan.md
@@ -0,0 +1,15 @@
+---
+title: "Mobsfscan"
+toc_hide: true
+---
+Import JSON report from [mobsfscan](https://github.com/MobSF/mobsfscan).
+
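+For example (flags per the mobsfscan README):
+
+```bash
+# Scan a source tree and write the JSON report
+mobsfscan --json -o mobsfscan.json src/
+```
+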
+### Sample Scan Data
+Sample Mobsfscan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mobsfscan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/mozilla_observatory.md b/docs/content/en/integrations/parsers/file/mozilla_observatory.md
new file mode 100644
index 00000000000..3d1150821d3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/mozilla_observatory.md
@@ -0,0 +1,8 @@
+---
+title: "Mozilla Observatory Scanner"
+toc_hide: true
+---
+Import JSON report.
+
+### Sample Scan Data
+Sample Mozilla Observatory Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/mozilla_observatory).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ms_defender.md b/docs/content/en/integrations/parsers/file/ms_defender.md
new file mode 100644
index 00000000000..2bf8c436ffd
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ms_defender.md
@@ -0,0 +1,20 @@
+---
+title: "MS Defender Parser"
+toc_hide: true
+---
+This parser parses Microsoft Defender findings and supports two types of imports:
+- You can import a JSON output file from the api/vulnerabilities/machinesVulnerabilities endpoint of Microsoft Defender.
+- You can upload a custom zip file which includes multiple JSON files from two Microsoft Defender endpoints. Build the zip yourself with two folders, machines/ and vulnerabilities/. In vulnerabilities/ you can place multiple JSON files from the api/vulnerabilities/machinesVulnerabilities REST API endpoint, and in machines/ the JSON output from the api/machines REST API endpoint. The parser then uses the information in both folders to add more specific information, such as the affected IP address, to the finding (see the sketch below for assembling such a zip).
+
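+A minimal sketch of assembling the custom zip (file names are illustrative; each folder holds raw JSON responses from the respective endpoint):
+
+```bash
+mkdir -p machines vulnerabilities
+# Save the API responses into the two folders, e.g.:
+#   machines/machines.json       from api/machines
+#   vulnerabilities/vulns_1.json from api/vulnerabilities/machinesVulnerabilities
+zip -r defender.zip machines/ vulnerabilities/
+```
+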
+### Sample Scan Data
+Sample MS Defender Parser scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ms_defender).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/netsparker.md b/docs/content/en/integrations/parsers/file/netsparker.md
new file mode 100644
index 00000000000..7e46af07b12
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/netsparker.md
@@ -0,0 +1,8 @@
+---
+title: "Netsparker"
+toc_hide: true
+---
+Vulnerabilities List - JSON report
+
+### Sample Scan Data
+Sample Netsparker scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/netsparker).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/neuvector.md b/docs/content/en/integrations/parsers/file/neuvector.md
new file mode 100644
index 00000000000..5acf03267a2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/neuvector.md
@@ -0,0 +1,8 @@
+---
+title: "NeuVector (compliance)"
+toc_hide: true
+---
+Imports compliance scans returned by REST API.
+
+### Sample Scan Data
+Sample NeuVector (compliance) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/neuvector).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/neuvector_compliance.md b/docs/content/en/integrations/parsers/file/neuvector_compliance.md
new file mode 100644
index 00000000000..cce614b2f90
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/neuvector_compliance.md
@@ -0,0 +1,8 @@
+---
+title: "NeuVector (REST)"
+toc_hide: true
+---
+JSON output of /v1/scan/{entity}/{id} endpoint
+
+### Sample Scan Data
+Sample NeuVector (REST) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/neuvector_compliance).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/nexpose.md b/docs/content/en/integrations/parsers/file/nexpose.md
new file mode 100644
index 00000000000..f2380a3666e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/nexpose.md
@@ -0,0 +1,8 @@
+---
+title: "Nexpose XML 2.0 (Rapid7)"
+toc_hide: true
+---
+Use the full XML export template from Nexpose.
+
+### Sample Scan Data
+Sample Nexpose XML 2.0 (Rapid7) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/nexpose).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/nikto.md b/docs/content/en/integrations/parsers/file/nikto.md
new file mode 100644
index 00000000000..09bcce9c10a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/nikto.md
@@ -0,0 +1,15 @@
+---
+title: "Nikto"
+toc_hide: true
+---
+Nikto web server scanner - https://cirt.net/Nikto2
+
+The current parser supports 3 sources:
+ - XML output (old)
+ - new XML output (with nxvmlversion="1.2" type)
+ - JSON output
+
+See: https://github.com/sullo/nikto
+
+### Sample Scan Data
+Sample Nikto scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/nikto).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/nmap.md b/docs/content/en/integrations/parsers/file/nmap.md
new file mode 100644
index 00000000000..cada9ad2d3c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/nmap.md
@@ -0,0 +1,15 @@
+---
+title: "Nmap"
+toc_hide: true
+---
+XML output (use -oX)
+
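+For example (target and output file name are illustrative):
+
+```bash
+# Service/version detection scan with XML output suitable for import
+nmap -sV -oX nmap_results.xml scanme.nmap.org
+```
+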
+### Sample Scan Data
+Sample Nmap scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/nmap).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/npm_audit.md b/docs/content/en/integrations/parsers/file/npm_audit.md
new file mode 100644
index 00000000000..ebf280da964
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/npm_audit.md
@@ -0,0 +1,16 @@
+---
+title: "NPM Audit"
+toc_hide: true
+---
+Node Package Manager (NPM) Audit plugin output file can be imported in
+JSON format. Only the 'advisories' subtree is imported.
+
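+For example (output file name is illustrative; the 'advisories' subtree mentioned above is present in npm 6-style output):
+
+```bash
+# Run inside the project directory; requires a lockfile
+npm audit --json > npm_audit.json
+```
+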
+### Sample Scan Data
+Sample NPM Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/npm_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/nsp.md b/docs/content/en/integrations/parsers/file/nsp.md
new file mode 100644
index 00000000000..916495ecdf2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/nsp.md
@@ -0,0 +1,8 @@
+---
+title: "Node Security Platform"
+toc_hide: true
+---
+Node Security Platform (NSP) output file can be imported in JSON format.
+
+### Sample Scan Data
+Sample Node Security Platform scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/nsp).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/nuclei.md b/docs/content/en/integrations/parsers/file/nuclei.md
new file mode 100644
index 00000000000..3e63a2b9429
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/nuclei.md
@@ -0,0 +1,8 @@
+---
+title: "Nuclei"
+toc_hide: true
+---
+Import the JSON output of a Nuclei scan report.
+
+### Sample Scan Data
+Sample Nuclei scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/nuclei).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/openscap.md b/docs/content/en/integrations/parsers/file/openscap.md
new file mode 100644
index 00000000000..220f27d66e0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/openscap.md
@@ -0,0 +1,8 @@
+---
+title: "Openscap Vulnerability Scan"
+toc_hide: true
+---
+Import Openscap Vulnerability Scan reports in XML format.
+
+### Sample Scan Data
+Sample Openscap Vulnerability Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/openscap).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/openvas.md b/docs/content/en/integrations/parsers/file/openvas.md
new file mode 100644
index 00000000000..73fcbcebc17
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/openvas.md
@@ -0,0 +1,8 @@
+---
+title: "OpenVAS Parser"
+toc_hide: true
+---
+You can upload the exported results of an OpenVAS scan in either .csv or .xml format.
+
+### Sample Scan Data
+Sample OpenVAS scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/openvas).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ort.md b/docs/content/en/integrations/parsers/file/ort.md
new file mode 100644
index 00000000000..2aac161efd3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ort.md
@@ -0,0 +1,8 @@
+---
+title: "ORT evaluated model Importer"
+toc_hide: true
+---
+Import the evaluated model of the OSS Review Toolkit (ORT) in JSON format.
+
+### Sample Scan Data
+Sample ORT evaluated model Importer scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ort).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ossindex_devaudit.md b/docs/content/en/integrations/parsers/file/ossindex_devaudit.md
new file mode 100644
index 00000000000..cb007e5a3e3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ossindex_devaudit.md
@@ -0,0 +1,9 @@
+---
+title: "OssIndex Devaudit"
+toc_hide: true
+---
+Import JSON formatted output from
+OSSIndex Devaudit.
+
+### Sample Scan Data
+Sample OssIndex Devaudit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ossindex_devaudit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/outpost24.md b/docs/content/en/integrations/parsers/file/outpost24.md
new file mode 100644
index 00000000000..2c0f974f02e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/outpost24.md
@@ -0,0 +1,8 @@
+---
+title: "Outpost24 Scan"
+toc_hide: true
+---
+Import Outpost24 endpoint vulnerability scan in XML format.
+
+### Sample Scan Data
+Sample Outpost24 Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/outpost24).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/php_security_audit_v2.md b/docs/content/en/integrations/parsers/file/php_security_audit_v2.md
new file mode 100644
index 00000000000..1abcb0e741c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/php_security_audit_v2.md
@@ -0,0 +1,8 @@
+---
+title: "PHP Security Audit v2"
+toc_hide: true
+---
+Import PHP Security Audit v2 Scan in JSON format.
+
+### Sample Scan Data
+Sample PHP Security Audit v2 scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/php_security_audit_v2).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/php_symfony_security_check.md b/docs/content/en/integrations/parsers/file/php_symfony_security_check.md
new file mode 100644
index 00000000000..27552cb8395
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/php_symfony_security_check.md
@@ -0,0 +1,8 @@
+---
+title: "PHP Symfony Security Checker"
+toc_hide: true
+---
+Import results from the PHP Symfony Security Checker.
+
+### Sample Scan Data
+Sample PHP Symfony Security Checker scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/php_symfony_security_check).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/pip_audit.md b/docs/content/en/integrations/parsers/file/pip_audit.md
new file mode 100644
index 00000000000..df24cdbe7a3
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/pip_audit.md
@@ -0,0 +1,15 @@
+---
+title: "pip-audit Scan"
+toc_hide: true
+---
+Import a pip-audit JSON scan report.
+
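+For example (output file name is illustrative):
+
+```bash
+# Audit the current environment and write the JSON report
+pip-audit -f json -o pip_audit.json
+```
+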
+### Sample Scan Data
+Sample pip-audit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pip_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/pmd.md b/docs/content/en/integrations/parsers/file/pmd.md
new file mode 100644
index 00000000000..ebb4d951764
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/pmd.md
@@ -0,0 +1,8 @@
+---
+title: "PMD Scan"
+toc_hide: true
+---
+CSV Report
+
+### Sample Scan Data
+Sample PMD Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pmd).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/popeye.md b/docs/content/en/integrations/parsers/file/popeye.md
new file mode 100644
index 00000000000..82dbdd89582
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/popeye.md
@@ -0,0 +1,68 @@
+---
+title: "Popeye"
+toc_hide: true
+---
+
+# Popeye Parser documentation
+
+Popeye is a utility that scans a live Kubernetes cluster and reports potential issues with deployed resources and configurations. For more information about the tool, please visit the public repository https://github.com/derailed/popeye.
+
+## Popeye reports
+
+Popeye offers different formats to export its reports; for simplicity, this parser supports the JSON option. Support for other report formats is planned for the future.
+
+JSON reports have the following structure:
+
+```json
+{
+ "popeye": {
+ "score": 100,
+ "grade": "B",
+ "sanitizers": [
+ {
+ "sanitizer": "cluster",
+ "gvr": "cluster",
+ "tally": {
+ "ok": 1,
+ "info": 0,
+ "warning": 0,
+ "error": 0,
+ "score": 100
+ },
+ "issues": {
+ "Version": [
+ {
+ "group": "__root__",
+ "gvr": "cluster",
+ "level": 0,
+ "message": "[POP-406] K8s version OK"
+ }
+ ]
+ }
+ }
+ ]
+ }
+}
+```
+
+The report offers a list of "sanitizers", which is the list of scanned resource types in the cluster. Each sanitizer in turn has a list of issues, whose names match specific resources of the cluster (pods, roles, clusterroles, etc.), and each of these resources holds a list of specific findings for that resource (the issues in the report).
+
+This parser goes through every finding inside the issues of every sanitizer and creates DefectDojo findings for the ones with level 1 (Info), 2 (Warning) or 3 (Error).
+
+## Findings severity matching
+
+Popeye scan findings don't map to public vulnerabilities; Popeye just looks for possible informational topics, warnings or errors in Kubernetes resource definitions or configuration, so it categorizes its findings the following way:
+
+- Severity 0: Ok
+- Severity 1: Info
+- Severity 2: Warning
+- Severity 3: Error
+
+To map this to the DefectDojo severity scale, Severity 0 (Ok) findings from Popeye are ignored, as those are checks that do not need an action to be resolved. For the rest:
+
+- Severity 1 (Info) Popeye findings will be created as Severity "Info" findings in DefectDojo.
+- Severity 2 (Warning) Popeye findings will be created as Severity "Low" findings in DefectDojo.
+- Severity 3 (Error) Popeye findings will be created as Severity "High" findings in DefectDojo.
+
+### Sample Scan Data
+Sample Popeye scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/popeye).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/pwn_sast.md b/docs/content/en/integrations/parsers/file/pwn_sast.md
new file mode 100644
index 00000000000..241f2c0ca6d
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/pwn_sast.md
@@ -0,0 +1,9 @@
+---
+title: "PWN Security Automation Framework"
+toc_hide: true
+---
+- pwn_sast: Import the JSON results generated by the pwn_sast driver. This driver scans source code repositories for security anti-patterns that may result in vulnerability identification.
+- More driver results coming soon...
+
+### Sample Scan Data
+Sample PWN Security Automation Framework scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pwn_sast).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/qualys.md b/docs/content/en/integrations/parsers/file/qualys.md
new file mode 100644
index 00000000000..7fd532c79a8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/qualys.md
@@ -0,0 +1,21 @@
+---
+title: "Qualys Scan"
+toc_hide: true
+---
+Qualys output files can be imported in API XML format or
+WebGUI XML format.
+
+A CSV formatted Qualys Scan Report can also be used. Ensure the following values are checked in the Scan Report Template config:
+
+`CVSS Version = CVSSv3`
+
+* Vulnerability Details
+ * Threat
+ * Impact
+* Solution
+ * Patches and Workarounds
+ * Virtual Patches and Mitigating Controls
+* Results
+
+### Sample Scan Data
+Sample Qualys Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/qualys).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/qualys_infrascan_webgui.md b/docs/content/en/integrations/parsers/file/qualys_infrascan_webgui.md
new file mode 100644
index 00000000000..bba44904df1
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/qualys_infrascan_webgui.md
@@ -0,0 +1,8 @@
+---
+title: "Qualys Infrastructure Scan (WebGUI XML)"
+toc_hide: true
+---
+Qualys WebGUI output files can be imported in XML format.
+
+### Sample Scan Data
+Sample Qualys Infrastructure Scan (WebGUI XML) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/qualys_infrascan_webgui).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/qualys_webapp.md b/docs/content/en/integrations/parsers/file/qualys_webapp.md
new file mode 100644
index 00000000000..b8a4017b113
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/qualys_webapp.md
@@ -0,0 +1,8 @@
+---
+title: "Qualys Webapp Scan"
+toc_hide: true
+---
+Qualys WebScan output files can be imported in XML format.
+
+### Sample Scan Data
+Sample Qualys Webapp Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/qualys_webapp).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/redhatsatellite.md b/docs/content/en/integrations/parsers/file/redhatsatellite.md
new file mode 100644
index 00000000000..af047ef6c67
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/redhatsatellite.md
@@ -0,0 +1,8 @@
+---
+title: "Red Hat Satellite"
+toc_hide: true
+---
+You can import a JSON report which was retrieved through the REST API of Red Hat Satellite. The scanner can be found [here](https://www.redhat.com/en/technologies/management/satellite).
+
+### Sample Scan Data
+Sample Red Hat Satellite scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/redhatsatellite).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/retirejs.md b/docs/content/en/integrations/parsers/file/retirejs.md
new file mode 100644
index 00000000000..b975aa7b603
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/retirejs.md
@@ -0,0 +1,8 @@
+---
+title: "Retire.js"
+toc_hide: true
+---
+Retire.js JavaScript scan (--js) output file can be imported in JSON format.
+
+### Sample Scan Data
+Sample Retire.js scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/retirejs).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/risk_recon.md b/docs/content/en/integrations/parsers/file/risk_recon.md
new file mode 100644
index 00000000000..917b7ed3bc5
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/risk_recon.md
@@ -0,0 +1,60 @@
+---
+title: "Risk Recon API Importer"
+toc_hide: true
+---
+Import findings from Risk Recon via the API. Configure your own JSON report as follows
+
+{{< highlight json >}}
+{
+ "url_endpoint": "https://api.riskrecon.com/v1",
+  "api_key": "your-api-key",
+ "companies": [
+ {
+ "name": "Company 1",
+ "filters": {
+ "domain_name": [],
+ "ip_address": ["127.0.0.1"],
+ "host_name": ["localhost"],
+ "asset_value": [],
+ "severity": ["critical", "high"],
+ "priority": [],
+ "hosting_provider": [],
+ "country_name": []
+ }
+ },
+ {
+ "name": "Company 2",
+ "filters": {
+ "ip_address": ["0.0.0.0"]
+ }
+ }
+
+ ],
+ "filters": {
+ "domain_name": [],
+ "ip_address": [],
+ "host_name": [],
+ "asset_value": [],
+ "severity": ["critical"],
+ "priority": [],
+ "hosting_provider": [],
+ "country_name": []
+ }
+}
+{{< /highlight >}}
+
+- More than one company finding list can be queried with its own set
+  of filters. Company 1 shows all available filters, while Company 2
+  shows that empty filters need not be present.
+- To query all companies in your Risk Recon instance, simply remove
+  the "companies" field entirely.
+- If the "companies" field is not present, and filtering is still
+  requested, the "filters" field can be used to filter all findings
+  across all companies. It carries the same behavior as the company
+  filters. The "filters" field is disregarded in the presence of
+  the "companies" field.
+- Removing both fields will allow retrieval of all findings in the
+  Risk Recon instance.
+
+### Sample Scan Data
+Sample Risk Recon API Importer scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/risk_recon).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/rubocop.md b/docs/content/en/integrations/parsers/file/rubocop.md
new file mode 100644
index 00000000000..8a90bd8eda4
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/rubocop.md
@@ -0,0 +1,8 @@
+---
+title: "Rubocop Scan"
+toc_hide: true
+---
+Import Rubocop JSON scan report (with option -f json).
+
+### Sample Scan Data
+Sample Rubocop Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/rubocop).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/rusty_hog.md b/docs/content/en/integrations/parsers/file/rusty_hog.md
new file mode 100644
index 00000000000..52849c8d99b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/rusty_hog.md
@@ -0,0 +1,18 @@
+---
+title: "Rusty Hog parser"
+toc_hide: true
+---
+Import the JSON output.
+Rusty Hog is a secret scanner built in Rust for performance, based on TruffleHog which is written in Python.
+
+DefectDojo currently supports the parsing of the following Rusty Hog JSON outputs:
+- Choctaw Hog: Scans for secrets in a Git repository.
+- Duroc Hog: Scans for secrets in directories, files, and archives.
+- Gottingen Hog: Scans for secrets in a JIRA issue.
+- Essex Hog: Scans for secrets in a Confluence page.
+
+Rusty Hog scans only one target at a time, which is not efficient if you want to scan all targets (e.g. all JIRA tickets) and upload each single report to DefectDojo.
+[Rusty-Hog-Wrapper](https://github.com/manuel-sommer/Rusty-Hog-Wrapper) deals with this: it scans a whole JIRA project or Confluence space and merges the findings into a valid file which can be uploaded to DefectDojo. (This is not an official recommendation from DefectDojo, but rather a pointer in a direction on how to use this vulnerability scanner in a more efficient way.)
+
+### Sample Scan Data
+Sample Rusty Hog parser scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/rusty_hog).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/sarif.md b/docs/content/en/integrations/parsers/file/sarif.md
new file mode 100644
index 00000000000..2b7f2d1009e
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sarif.md
@@ -0,0 +1,29 @@
+---
+title: "SARIF"
+toc_hide: true
+---
+OASIS Static Analysis Results Interchange Format (SARIF). SARIF is
+supported by many tools.
+
+
+{{% alert title="Information" color="info" %}}
+The SARIF parser customizes the Test_Type with data from the report.
+For example, a report with `Dockle` as the driver name will produce a Test with a Test_Type named `Dockle Scan (SARIF)`.
+{{% /alert %}}
+
+{{% alert title="Warning" color="warning" %}}
+Current implementation is limited and will aggregate all the findings in the SARIF file in one single report.
+{{% /alert %}}
+
+##### Support for de-duplication (fingerprinting)
+
+The SARIF parser takes fingerprinting data into account. It is based on the `fingerprints` and `partialFingerprints` properties.
+It is possible to activate de-duplication based on this data by customizing the settings.
+
+```Python
+# in your settings.py file
+DEDUPLICATION_ALGORITHM_PER_PARSER["SARIF"] = DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE
+```
+
+### Sample Scan Data
+Sample SARIF scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sarif).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/scantist.md b/docs/content/en/integrations/parsers/file/scantist.md
new file mode 100644
index 00000000000..a29f1392d58
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/scantist.md
@@ -0,0 +1,8 @@
+---
+title: "Scantist Scan"
+toc_hide: true
+---
+Scantist is an open source management platform for scanning and remediating open source security, licensing and compliance risks across your software development lifecycle.
+
+### Sample Scan Data
+Sample Scantist Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/scantist).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/scout_suite.md b/docs/content/en/integrations/parsers/file/scout_suite.md
new file mode 100644
index 00000000000..7e97dbfd309
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/scout_suite.md
@@ -0,0 +1,12 @@
+---
+title: "ScoutSuite"
+toc_hide: true
+---
+Multi-cloud security auditing tool. It uses APIs exposed by cloud
+providers. Scan results are located in
+`scan-reports/scoutsuite-results/scoutsuite_*.json` files.
+Multiple scans will create multiple files if they are running against
+different cloud projects.
+
+### Sample Scan Data
+Sample ScoutSuite scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/scout_suite).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/semgrep.md b/docs/content/en/integrations/parsers/file/semgrep.md
new file mode 100644
index 00000000000..b88c8ed9d66
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/semgrep.md
@@ -0,0 +1,15 @@
+---
+title: "Semgrep JSON Report"
+toc_hide: true
+---
+Import Semgrep output (--json)
+
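+For example (the ruleset is illustrative):
+
+```bash
+# Scan the current directory with the auto config and save JSON output
+semgrep scan --config auto --json -o semgrep.json
+```
+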
+### Sample Scan Data
+Sample Semgrep JSON Report scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/semgrep).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/skf.md b/docs/content/en/integrations/parsers/file/skf.md
new file mode 100644
index 00000000000..c2fcfa27411
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/skf.md
@@ -0,0 +1,8 @@
+---
+title: "SKF Scan"
+toc_hide: true
+---
+Output of SKF Sprint summary export.
+
+### Sample Scan Data
+Sample SKF Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/skf).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/snyk.md b/docs/content/en/integrations/parsers/file/snyk.md
new file mode 100644
index 00000000000..f8cc7463789
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/snyk.md
@@ -0,0 +1,9 @@
+---
+title: "Snyk"
+toc_hide: true
+---
+Snyk output file (snyk test --json > snyk.json) can be imported in
+JSON format. Only the SCA (Software Composition Analysis) report is supported (SAST reports are not supported yet).
+
+### Sample Scan Data
+Sample Snyk scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/snyk).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/solar_appscreener.md b/docs/content/en/integrations/parsers/file/solar_appscreener.md
new file mode 100644
index 00000000000..80ab6a894d1
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/solar_appscreener.md
@@ -0,0 +1,8 @@
+---
+title: "Solar Appscreener Scan"
+toc_hide: true
+---
+Solar Appscreener report file can be imported in CSV format from Detailed_Results.csv
+
+### Sample Scan Data
+Sample Solar Appscreener Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/solar_appscreener).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/sonarqube.md b/docs/content/en/integrations/parsers/file/sonarqube.md
new file mode 100644
index 00000000000..4f5e90ed128
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sonarqube.md
@@ -0,0 +1,21 @@
+---
+title: "SonarQube"
+toc_hide: true
+---
+## SonarQube Scan (Aggregates findings per CWE, title, description, file_path.)
+
+SonarQube output files can be imported in HTML or JSON format. The JSON format is generated with the `--save-report-json` option and behaves the same way as the HTML format.
+
+Version: >= 1.1.0
+Recommended version for both formats: >= 3.1.2
+
+## SonarQube Scan Detailed (Import all findings from SonarQube HTML report.)
+
+SonarQube output files can be imported in HTML or JSON format. The JSON format is generated with the `--save-report-json` option and behaves the same way as the HTML format.
+
+Version: >= 1.1.0.
+Recommended version for both formats: >= 3.1.2
+
+
+### Sample Scan Data
+Sample SonarQube scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sonarqube).
diff --git a/docs/content/en/integrations/parsers/file/sonatype.md b/docs/content/en/integrations/parsers/file/sonatype.md
new file mode 100644
index 00000000000..c993fdd3f15
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sonatype.md
@@ -0,0 +1,8 @@
+---
+title: "Sonatype"
+toc_hide: true
+---
+JSON output.
+
+### Sample Scan Data
+Sample Sonatype scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sonatype).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/spotbugs.md b/docs/content/en/integrations/parsers/file/spotbugs.md
new file mode 100644
index 00000000000..69a288e5b5b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/spotbugs.md
@@ -0,0 +1,8 @@
+---
+title: "SpotBugs"
+toc_hide: true
+---
+XML report of the `textui` CLI.
+
+### Sample Scan Data
+Sample SpotBugs scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/spotbugs).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ssh_audit.md b/docs/content/en/integrations/parsers/file/ssh_audit.md
new file mode 100644
index 00000000000..29f95a82260
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ssh_audit.md
@@ -0,0 +1,8 @@
+---
+title: "SSH Audit"
+toc_hide: true
+---
+Import the JSON output of an ssh-audit report.
+
+### Sample Scan Data
+Sample SSH Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ssh_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/ssl_labs.md b/docs/content/en/integrations/parsers/file/ssl_labs.md
new file mode 100644
index 00000000000..cd5972e126b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/ssl_labs.md
@@ -0,0 +1,8 @@
+---
+title: "SSL Labs"
+toc_hide: true
+---
+JSON output of the ssllabs-scan CLI.
+
+### Sample Scan Data
+Sample SSL Labs scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/ssl_labs).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/sslscan.md b/docs/content/en/integrations/parsers/file/sslscan.md
new file mode 100644
index 00000000000..0255e5858ab
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sslscan.md
@@ -0,0 +1,15 @@
+---
+title: "Sslscan"
+toc_hide: true
+---
+Import XML output of sslscan report.
+
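+For example (target and output file name are illustrative):
+
+```bash
+# Scan a host and write the XML report
+sslscan --xml=sslscan_report.xml example.com
+```
+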
+### Sample Scan Data
+Sample Sslscan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sslscan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/sslyze.md b/docs/content/en/integrations/parsers/file/sslyze.md
new file mode 100644
index 00000000000..8abfd44b8fa
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sslyze.md
@@ -0,0 +1,12 @@
+---
+title: "Sslyze Scan"
+toc_hide: true
+---
+## Sslyze Scan
+XML report of SSLyze version 2 scan
+
+## SSLyze 3 Scan (JSON)
+JSON report of SSLyze version 3 scan
+
+### Sample Scan Data
+Sample Sslyze Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sslyze).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/stackhawk.md b/docs/content/en/integrations/parsers/file/stackhawk.md
new file mode 100644
index 00000000000..4f66fb5a82c
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/stackhawk.md
@@ -0,0 +1,9 @@
+---
+title: "StackHawk HawkScan"
+toc_hide: true
+---
+Import the JSON webhook event from StackHawk.
+For more information, check out our [docs on hooking up StackHawk to Defect Dojo](https://docs.stackhawk.com/workflow-integrations/defect-dojo.html)
+
+### Sample Scan Data
+Sample StackHawk HawkScan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/stackhawk).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/sysdig_reports.md b/docs/content/en/integrations/parsers/file/sysdig_reports.md
new file mode 100644
index 00000000000..9865597f7e2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/sysdig_reports.md
@@ -0,0 +1,11 @@
+---
+title: "Sysdig Vulnerability Reports"
+toc_hide: true
+---
+Import CSV report files from Sysdig, or a Sysdig UI JSON report.
+The parser accepts Pipeline, Registry and Runtime reports created from the UI.
+
+More information available at [our reporting docs page](https://docs.sysdig.com/en/docs/sysdig-secure/vulnerabilities/reporting)
+
+### Sample Scan Data
+Sample Sysdig Vulnerability Reports scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/sysdig_reports).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/talisman.md b/docs/content/en/integrations/parsers/file/talisman.md
new file mode 100644
index 00000000000..c542a1f0f2d
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/talisman.md
@@ -0,0 +1,46 @@
+---
+title: "Talisman"
+toc_hide: true
+---
+Run [Talisman](https://github.com/thoughtworks/talisman) in CLI mode and use "**--scan**" argument to scan the git commit history along with "**--reportDirectory**" argument to save the scan reports to a directory. The report will be in JSON format.
+
+Additionally, you can set up Git Hooks to automate the scan and then send the generated reports to DefectDojo using its API.
+
+Example:
+
+```bash
+#!/bin/sh
+
+# Set DefectDojo API credential and other variables
+DEFECTDOJO_API_KEY="your-api-key"
+DEFECTDOJO_URL="https://your-defectdojo-url.com"
+TALISMAN_RESULTS_DIR="$HOME"
+
+# Run talisman in CLI mode and output the result in JSON format
+CMD="talisman --scan --ignoreHistory --reportDirectory $TALISMAN_RESULTS_DIR"
+$CMD
+
+# Extract the result
+result=$(jq '.results[].filename' "${TALISMAN_RESULTS_DIR}/talisman_reports/data/report.json")
+
+# Check if result is not empty
+if [ -n "$result" ]; then
+  # If talisman found issues, send the report to the DefectDojo import-scan endpoint.
+  # Note: import-scan expects multipart form data; the engagement id is a placeholder, adjust it to your setup.
+  curl -X POST \
+    -H "Authorization: Token $DEFECTDOJO_API_KEY" \
+    -F "scan_type=Talisman Scan" \
+    -F "engagement=1" \
+    -F "file=@$TALISMAN_RESULTS_DIR/talisman_reports/data/report.json" \
+    "$DEFECTDOJO_URL/api/v2/import-scan/"
+
+ # Exit with a non-zero status code to indicate that the commit should be rejected
+ exit 1
+else
+ # If talisman did not find any issues, exit with a zero status code
+ exit 0
+fi
+```
+
+### Sample Scan Data
+Sample Talisman scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/talisman).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/tenable.md b/docs/content/en/integrations/parsers/file/tenable.md
new file mode 100644
index 00000000000..a4f0ad59030
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/tenable.md
@@ -0,0 +1,9 @@
+---
+title: "Tenable"
+toc_hide: true
+---
+Reports can be imported in CSV or .nessus (XML) format.
+Legacy Nessus and Nessus WAS reports are supported.
+
+### Sample Scan Data
+Sample Tenable scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/tenable).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/terrascan.md b/docs/content/en/integrations/parsers/file/terrascan.md
new file mode 100644
index 00000000000..c5d6016c5a0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/terrascan.md
@@ -0,0 +1,8 @@
+---
+title: "Terrascan"
+toc_hide: true
+---
+Import the JSON output of a Terrascan scan report.
+
+### Sample Scan Data
+Sample Terrascan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/terrascan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/testssl.md b/docs/content/en/integrations/parsers/file/testssl.md
new file mode 100644
index 00000000000..501cb8b8a7a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/testssl.md
@@ -0,0 +1,15 @@
+---
+title: "Testssl Scan"
+toc_hide: true
+---
+Import CSV output of testssl scan report.
+
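+For example (target and output file name are illustrative):
+
+```bash
+# Scan a host and write the CSV report
+./testssl.sh --csvfile testssl_report.csv https://example.com
+```
+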
+### Sample Scan Data
+Sample Testssl Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/testssl).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/tfsec.md b/docs/content/en/integrations/parsers/file/tfsec.md
new file mode 100644
index 00000000000..7a0aca9d57a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/tfsec.md
@@ -0,0 +1,15 @@
+---
+title: "TFSec"
+toc_hide: true
+---
+Import of JSON report from [tfsec](https://github.com/aquasecurity/tfsec).
+
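+For example (JSON is written to stdout and redirected to a file here):
+
+```bash
+# Scan a Terraform directory and write the JSON report
+tfsec . --format json > tfsec_report.json
+```
+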
+### Sample Scan Data
+Sample TFSec scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/tfsec).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/threagile.md b/docs/content/en/integrations/parsers/file/threagile.md
new file mode 100644
index 00000000000..6469fb54752
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/threagile.md
@@ -0,0 +1,88 @@
+---
+title: "Threagile"
+toc_hide: true
+---
+
+### File Types
+DefectDojo parser accepts a .json file.
+JSON reports are created from the Threagile tool (default name `risks.json`) using the following command:
+
+```shell
+docker run --rm -it -v "$(pwd)":/app/work threagile/threagile -verbose -model /app/work/threagile.yaml -output /app/work
+```
+
+
+### Acceptable JSON Format
+The parser expects an array of findings. All properties are strings. The required fields are the following:
+- "category"
+- "title"
+- "severity"
+- "synthetic_id"
+- "exploitation_impact"
+
+The `category` field is used to set both the title of the Finding and the CWE.
+The `most_relevant_technical_asset` field is used to determine the component.
+~~~
+
+[
+ {
+ "category": "unguarded-direct-datastore-access",
+ "risk_status": "unchecked",
+ "severity": "elevated",
+ "exploitation_likelihood": "likely",
+ "exploitation_impact": "medium",
+ "title": "\u003cb\u003eUnguarded Direct Datastore Access\u003c/b\u003e of \u003cb\u003ePoliciesRegoStorage\u003c/b\u003e by \u003cb\u003eEnergon\u003c/b\u003e via \u003cb\u003eEnergonToPolicyRegoFileStorage\u003c/b\u003e",
+ "synthetic_id": "unguarded-direct-datastore-access@energon-ta\u003eenergontopolicyregofilestorage@energon-ta@policies-rego-storage-ta",
+ "most_relevant_data_asset": "",
+ "most_relevant_technical_asset": "policies-rego-storage-ta",
+ "most_relevant_trust_boundary": "",
+ "most_relevant_shared_runtime": "",
+ "most_relevant_communication_link": "energon-ta\u003eenergontopolicyregofilestorage",
+ "data_breach_probability": "improbable",
+ "data_breach_technical_assets": [
+ "policies-rego-storage-ta"
+ ]
+ },
+ {
+ "category": "unguarded-direct-datastore-access",
+ "risk_status": "in-discussion",
+ "severity": "elevated",
+ "exploitation_likelihood": "likely",
+ "exploitation_impact": "medium",
+ "title": "\u003cb\u003eUnguarded Direct Datastore Access\u003c/b\u003e of \u003cb\u003ePoliciesRegoStorage\u003c/b\u003e by \u003cb\u003eIAMSidecar\u003c/b\u003e via \u003cb\u003eIAMBachendAPIPoliciesRegoFileStorage\u003c/b\u003e",
+ "synthetic_id": "unguarded-direct-datastore-access@iam-sidecar-ta\u003eiambachendapipoliciesregofilestorage@iam-sidecar-ta@policies-rego-storage-ta",
+ "most_relevant_data_asset": "",
+ "most_relevant_technical_asset": "policies-rego-storage-ta",
+ "most_relevant_trust_boundary": "",
+ "most_relevant_shared_runtime": "",
+ "most_relevant_communication_link": "iam-sidecar-ta\u003eiambachendapipoliciesregofilestorage",
+ "data_breach_probability": "improbable",
+ "data_breach_technical_assets": [
+ "policies-rego-storage-ta"
+ ]
+ },
+ {
+ "category": "unguarded-direct-datastore-access",
+ "risk_status": "accepted",
+ "severity": "elevated",
+ "exploitation_likelihood": "likely",
+ "exploitation_impact": "medium",
+ "title": "\u003cb\u003eUnguarded Direct Datastore Access\u003c/b\u003e of \u003cb\u003ePoliciesRegoStorage\u003c/b\u003e by \u003cb\u003eIDMSidecar\u003c/b\u003e via \u003cb\u003eIAMSidecarPoliciesRegoFileStorage\u003c/b\u003e",
+ "synthetic_id": "unguarded-direct-datastore-access@idm-sidecar-ta\u003eiamsidecarpoliciesregofilestorage@idm-sidecar-ta@policies-rego-storage-ta",
+ "most_relevant_data_asset": "",
+ "most_relevant_technical_asset": "policies-rego-storage-ta",
+ "most_relevant_trust_boundary": "",
+ "most_relevant_shared_runtime": "",
+ "most_relevant_communication_link": "idm-sidecar-ta\u003eiamsidecarpoliciesregofilestorage",
+ "data_breach_probability": "improbable",
+ "data_breach_technical_assets": [
+ "policies-rego-storage-ta"
+ ]
+ },
+ ...
+]
+
+~~~
+
+### Sample Scan Data
+Sample Threagile scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/threagile).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trivy.md b/docs/content/en/integrations/parsers/file/trivy.md
new file mode 100644
index 00000000000..01823598b70
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trivy.md
@@ -0,0 +1,15 @@
+---
+title: "Trivy"
+toc_hide: true
+---
+JSON report of [trivy scanner](https://github.com/aquasecurity/trivy).
+
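+For example (the image name is illustrative):
+
+```bash
+# Scan a container image and write the JSON report
+trivy image --format json --output trivy_report.json alpine:3.19
+```
+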
+### Sample Scan Data
+Sample Trivy scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trivy).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trivy_operator.md b/docs/content/en/integrations/parsers/file/trivy_operator.md
new file mode 100644
index 00000000000..1433b8231fe
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trivy_operator.md
@@ -0,0 +1,10 @@
+---
+title: "Trivy Operator"
+toc_hide: true
+---
+JSON report of [trivy operator scanner](https://github.com/aquasecurity/trivy-operator).
+
+To import the generated Vulnerability Reports, you can also use the [trivy-dojo-report-operator](https://github.com/telekom-mms/trivy-dojo-report-operator).
+
+### Sample Scan Data
+Sample Trivy Operator scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trivy_operator).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trufflehog.md b/docs/content/en/integrations/parsers/file/trufflehog.md
new file mode 100644
index 00000000000..c787e8e8105
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trufflehog.md
@@ -0,0 +1,8 @@
+---
+title: "Trufflehog"
+toc_hide: true
+---
+JSON output of TruffleHog. Versions 2 and 3 of https://github.com/trufflesecurity/trufflehog are supported.
+
+### Sample Scan Data
+Sample Trufflehog scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trufflehog).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trufflehog3.md b/docs/content/en/integrations/parsers/file/trufflehog3.md
new file mode 100644
index 00000000000..44fd436d541
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trufflehog3.md
@@ -0,0 +1,8 @@
+---
+title: "Trufflehog3"
+toc_hide: true
+---
+JSON Output of Trufflehog3, a fork of TruffleHog located at https://github.com/feeltheajf/truffleHog3
+
+### Sample Scan Data
+Sample Trufflehog3 scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trufflehog3).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trustwave.md b/docs/content/en/integrations/parsers/file/trustwave.md
new file mode 100644
index 00000000000..e5c6305ea7b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trustwave.md
@@ -0,0 +1,8 @@
+---
+title: "Trustwave"
+toc_hide: true
+---
+CSV output of Trustwave vulnerability scan.
+
+### Sample Scan Data
+Sample Trustwave scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trustwave).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/trustwave_fusion_api.md b/docs/content/en/integrations/parsers/file/trustwave_fusion_api.md
new file mode 100644
index 00000000000..d4f61fd0570
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/trustwave_fusion_api.md
@@ -0,0 +1,8 @@
+---
+title: "Trustwave Fusion API Scan"
+toc_hide: true
+---
+Trustwave Fusion API report file can be imported in JSON format
+
+### Sample Scan Data
+Sample Trustwave Fusion API Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/trustwave_fusion_api).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/twistlock.md b/docs/content/en/integrations/parsers/file/twistlock.md
new file mode 100644
index 00000000000..e682da7402b
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/twistlock.md
@@ -0,0 +1,14 @@
+---
+title: "Twistlock"
+toc_hide: true
+---
+JSON output of the `twistcli` tool. Example:
+
+{{< highlight bash >}}
+./twistcli images scan --address https://<CONSOLE> --user <USER> --details --output-file=<RESULTS_FILE> <IMAGE>
+{{< /highlight >}}
+
+The CSV output from the UI is now also accepted.
+
+### Sample Scan Data
+Sample Twistlock scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/twistlock).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/vcg.md b/docs/content/en/integrations/parsers/file/vcg.md
new file mode 100644
index 00000000000..f2bdb310057
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/vcg.md
@@ -0,0 +1,8 @@
+---
+title: "Visual Code Grepper (VCG)"
+toc_hide: true
+---
+VCG output can be imported in CSV or XML format.
+
+### Sample Scan Data
+Sample Visual Code Grepper (VCG) scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/vcg).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/veracode.md b/docs/content/en/integrations/parsers/file/veracode.md
new file mode 100644
index 00000000000..77237860413
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/veracode.md
@@ -0,0 +1,51 @@
+---
+title: "Veracode"
+toc_hide: true
+---
+
+Veracode reports can be ingested in either XML or JSON format.
+
+- Detailed XML Report
+- JSON REST Findings from `/appsec/v2/applications/{application_guid}/findings/`
+ - Acceptable scan types include `STATIC`, `DYNAMIC`, and `SCA`
+ - Findings with a status of `CLOSED` will not be imported into DefectDojo
+ - Acceptable formats are as follows:
+ - Findings list
+ - Requires slight modification of the response returned from the API
+      - Example of a request being: `url | jq "{findings}"`
+ - Desired Format:
+ ```
+ {
+ "findings": [
+ {
+ ...
+ },
+ ...
+ ]
+ }
+ ```
+ - Embedded
+ - This response can be saved directly to a file and uploaded
+ - Not as ideal for crafting a refined report consisting of multiple requests
+ - Desired Format:
+ ```
+ {
+ "_embedded": {
+ "findings": [
+ {
+ ...
+ },
+ ...
+ ]
+ },
+ "_links": {
+ ...
+ },
+ "page": {
+ ...
+ }
+ }
+ ```
+
+### Sample Scan Data
+Sample Veracode scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/veracode).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/veracode_sca.md b/docs/content/en/integrations/parsers/file/veracode_sca.md
new file mode 100644
index 00000000000..59db59d2a31
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/veracode_sca.md
@@ -0,0 +1,8 @@
+---
+title: "Veracode SourceClear"
+toc_hide: true
+---
+Import the Project CSV or JSON report.
+
+### Sample Scan Data
+Sample Veracode SourceClear scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/veracode_sca).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/wapiti.md b/docs/content/en/integrations/parsers/file/wapiti.md
new file mode 100644
index 00000000000..53a4cd619b0
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/wapiti.md
@@ -0,0 +1,8 @@
+---
+title: "Wapiti Scan"
+toc_hide: true
+---
+Import XML report.
+
+### Sample Scan Data
+Sample Wapiti Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/wapiti).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/wazuh.md b/docs/content/en/integrations/parsers/file/wazuh.md
new file mode 100644
index 00000000000..329372ff84d
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/wazuh.md
@@ -0,0 +1,52 @@
+---
+title: "Wazuh Scanner"
+toc_hide: true
+---
+
+### File Types
+The DefectDojo parser accepts a .json file from [Wazuh](https://wazuh.com). The export from Wazuh can be done in two ways; choose the one you prefer:
+
+- export the Wazuh findings from the API and upload them to DefectDojo. This method may be the easiest one, but it exports all known vulnerabilities at once. It is not possible to sort them by client or any other category, and you will receive all vulnerabilities in one engagement. It also does not output the endpoint of a finding.
+- export the findings via the script [available here](https://github.com/quirinziessler/wazuh-findings-exporter). The script fetches the findings by Wazuh client groups and saves them as JSON, ready for upload. You will receive one file per group, allowing you to separate the clients via engagements in DefectDojo. It also exports the endpoints' hostnames and displays them in the DefectDojo UI.
+
+Regardless of which option you choose, remember to adjust the maximum scan file size via "DD_SCAN_FILE_MAX_SIZE" if your files exceed the default limit of 100MB. Depending on the number and category of integrated devices, the file size can grow quickly.
+
+### Acceptable JSON Format
+The parser expects a .json file structured as shown below.
+
+~~~
+{
+ "data": {
+ "affected_items": [
+ {
+ "architecture": "amd64",
+ "condition": "Package less than 4.3.2",
+ "cve": "CVE-1234-123123",
+ "cvss2_score": 0,
+ "cvss3_score": 5.5,
+ "detection_time": "2023-02-08T13:55:10Z",
+ "external_references": [
+ "https://nvd.nist.gov/vuln/detail/CVE-YYYY-XXXXX",
+ "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-YYYY-XXXXX"
+ ],
+ "name": "asdf",
+ "published": "2022-09-01",
+ "severity": "Medium",
+ "status": "VALID",
+ "title": "CVE-YYYY-XXXXX affects asdf",
+ "type": "PACKAGE",
+ "updated": "2022-09-07",
+ "version": "4.3.1"
+ }
+ ],
+ "failed_items": [],
+ "total_affected_items": 1,
+ "total_failed_items": 0
+ },
+ "error": 0,
+ "message": "All selected vulnerabilities were returned"
+}
+~~~
+
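+For illustration, a short Python snippet (hypothetical file name) that reads such an export and prints the key fields shown above:
+
+```python
+import json
+
+with open("wazuh_export.json") as f:  # hypothetical file name
+    report = json.load(f)
+
+for item in report["data"]["affected_items"]:
+    print(item["cve"], item["severity"], item["name"], item["version"])
+```
+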
+### Sample Scan Data
+Sample Wazuh Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/wazuh).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/wfuzz.md b/docs/content/en/integrations/parsers/file/wfuzz.md
new file mode 100644
index 00000000000..1893c359bd2
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/wfuzz.md
@@ -0,0 +1,20 @@
+---
+title: "Wfuzz JSON importer"
+toc_hide: true
+---
+Import the result of Wfuzz (https://github.com/xmendez/wfuzz) exported in JSON format (`wfuzz -o json -f myJSONReport.json,json`).
+
+HTTP return codes are mapped directly to severities as follows (this mapping is currently hardcoded in the parser):
+
+HTTP Return Code | Severity
+-----------------|---------
+200 | High
+302 | Low
+401 | Medium
+403 | Medium
+404 | Medium
+407 | Medium
+500 | Low
+
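+The same mapping expressed as a Python dict, for reference (illustrative; the authoritative mapping is hardcoded in the parser itself):
+
+```python
+# severity assigned per HTTP return code, mirroring the table above
+SEVERITY_BY_RETURN_CODE = {
+    200: "High",
+    302: "Low",
+    401: "Medium",
+    403: "Medium",
+    404: "Medium",
+    407: "Medium",
+    500: "Low",
+}
+```
+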
+### Sample Scan Data
+Sample Wfuzz JSON importer scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/wfuzz).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/whispers.md b/docs/content/en/integrations/parsers/file/whispers.md
new file mode 100644
index 00000000000..dfa5b104ef7
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/whispers.md
@@ -0,0 +1,9 @@
+---
+title: "Whispers"
+toc_hide: true
+---
+Import Whispers JSON results.
+https://github.com/adeptex/whispers
+
+### Sample Scan Data
+Sample Whispers scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/whispers).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/whitehat_sentinel.md b/docs/content/en/integrations/parsers/file/whitehat_sentinel.md
new file mode 100644
index 00000000000..756fac5069a
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/whitehat_sentinel.md
@@ -0,0 +1,8 @@
+---
+title: "WhiteHat Sentinel"
+toc_hide: true
+---
+WhiteHat Sentinel output from `api/vuln/query_site` can be imported in JSON format.
+
+### Sample Scan Data
+Sample WhiteHat Sentinel scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/whitehat_sentinel).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/wpscan.md b/docs/content/en/integrations/parsers/file/wpscan.md
new file mode 100644
index 00000000000..3e47e2bc6f8
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/wpscan.md
@@ -0,0 +1,8 @@
+---
+title: "Wpscan Scanner"
+toc_hide: true
+---
+Import JSON report.
+
+### Sample Scan Data
+Sample Wpscan Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/wpscan).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/xanitizer.md b/docs/content/en/integrations/parsers/file/xanitizer.md
new file mode 100644
index 00000000000..553292b5928
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/xanitizer.md
@@ -0,0 +1,9 @@
+---
+title: "Xanitizer"
+toc_hide: true
+---
+Import the XML findings list report, preferably generated with the parameter
+`generateDetailsInFindingsListReport=true`.
+
+### Sample Scan Data
+Sample Xanitizer scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/xanitizer).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/yarn_audit.md b/docs/content/en/integrations/parsers/file/yarn_audit.md
new file mode 100644
index 00000000000..e7de450a756
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/yarn_audit.md
@@ -0,0 +1,8 @@
+---
+title: "Yarn Audit"
+toc_hide: true
+---
+Import Yarn Audit scan report in JSON format. Use something like `yarn audit --json > yarn_report.json`.
+
+### Sample Scan Data
+Sample Yarn Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/yarn_audit).
\ No newline at end of file
diff --git a/docs/content/en/integrations/parsers/file/zap.md b/docs/content/en/integrations/parsers/file/zap.md
new file mode 100644
index 00000000000..43fd58e05c7
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/zap.md
@@ -0,0 +1,8 @@
+---
+title: "Zed Attack Proxy"
+toc_hide: true
+---
+ZAP XML report format (with or without requests and responses).
+
+### Sample Scan Data
+Sample Zed Attack Proxy scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/zap).
\ No newline at end of file
diff --git a/docs/content/en/integrations/social-authentication.md b/docs/content/en/integrations/social-authentication.md
index 6b1b7a378a3..4856ec1777c 100644
--- a/docs/content/en/integrations/social-authentication.md
+++ b/docs/content/en/integrations/social-authentication.md
@@ -92,8 +92,7 @@ to be created. Closely follow the steps below to guarantee success.
DD_SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_EMAILS = ['']
{{< /highlight >}}
-OKTA
-----
+## OKTA
In a similar fashion to that of Google, using OKTA as a OAuth2 provider
carries the same attributes and a similar procedure. Follow along below.
@@ -137,7 +136,7 @@ carries the same attributes and a similar procedure. Follow along below.
DD_SOCIAL_AUTH_OKTA_OAUTH2_ENABLED=True,
DD_SOCIAL_AUTH_OKTA_OAUTH2_KEY=(str, '**YOUR_CLIENT_ID_FROM_STEP_ABOVE**'),
DD_SOCIAL_AUTH_OKTA_OAUTH2_SECRET=(str, '**YOUR_CLIENT_SECRET_FROM_STEP_ABOVE**'),
- DD_SOCIAL_AUTH_OKTA_OAUTH2_API_URL=(str, 'https://{your-org-url}/oauth2/default'),
+ DD_SOCIAL_AUTH_OKTA_OAUTH2_API_URL=(str, 'https://{your-org-url}/oauth2'),
{{< /highlight >}}
If during the login process you get the following error: *The
@@ -148,7 +147,7 @@ GET parameter starts with `http://` instead of
`SOCIAL_AUTH_REDIRECT_IS_HTTPS = True` in the settings.
## Azure Active Directory
-
+### Azure AD Configuration
You can now use your corporate Azure Active Directory to authenticate
users to Defect Dojo. Users will be using your corporate Azure AD
account (A.K.A. Office 365 identity) to authenticate via OAuth, and all
@@ -158,7 +157,7 @@ in, it will try to match the UPN of the user to an existing e-mail from
a user in Defect Dojo, and if no match is found, a new user will be
created in Defect Dojo, associated with the unique id/value of the user
provided by your Azure AD tenant. Then, you can assign roles to this
-user, such as 'staff' or 'superuser'
+user, such as 'superuser'.
1. Navigate to the following address and follow instructions to create
a new app registration
@@ -191,26 +190,61 @@ user, such as 'staff' or 'superuser'
5. Restart your Dojo, and you should now see a **Login with Azure AD**
button on the login page which should *magically* work
+### Automatic Import of User-Groups
+To import groups from Azure AD users, the following environment variable needs to be set:
+
+ {{< highlight python >}}
+ DD_SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_GET_GROUPS=True
+ {{< /highlight >}}
+
+This will ensure the user is added to all the groups found in the Azure AD Token. Any missing groups will be created in DefectDojo (unless filtered). This group synchronization allows for product access via groups to limit the products a user can interact with.
+
+The Azure AD token returned by Azure will also need to be configured to include group IDs. Without this step, the
+token will not contain any notion of a group, and the mapping process will report that the current user is not a member of any
+groups. To update the format of the token, add a group claim that applies to whatever group type you are using.
+If unsure of what type that is, select `All Groups`. Do not activate `Emit groups as role claims` within the Azure AD
+"Token configuration" page.
+
+Application API permissions need to be updated with the `Group.Read.All` permission so that groups can be read on behalf
+of the user that has successfully signed in.
+
+To limit the number of groups imported from Azure AD, a regular expression can be used, as in the following:
+
+ {{< highlight python >}}
+ DD_SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_GROUPS_FILTER='^team-.*' # or 'teamA|teamB|groupC'
+ {{< /highlight >}}
+
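+As a rough illustration of how such a pattern selects groups (assuming standard Python `re` search semantics; the exact matching is done in DefectDojo's pipeline code):
+
+```python
+import re
+
+groups = ["team-appsec", "team-platform", "finance"]
+print([g for g in groups if re.search(r"^team-.*", g)])
+# ['team-appsec', 'team-platform']
+```
+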
+### Automatic Cleanup of User-Groups
+
+To prevent authorization creep, Azure AD groups that a user no longer belongs to can be removed with the following environment parameter:
+
+ {{< highlight python >}}
+ DD_SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS=True
+ {{< /highlight >}}
+
+When a user is removed from a given group in Azure AD, they will also be removed from the corresponding group in DefectDojo.
+If a group in DefectDojo no longer has any members, it will be left as is for record-keeping purposes.
+
## Gitlab
In a similar fashion to that of Google and OKTA, using Gitlab as a
OAuth2 provider carries the same attributes and a similar procedure.
Follow along below.
-1. Navigate to your Gitlab settings page and got to the Applications
+1. Navigate to your Gitlab settings page and go to the Applications
section
-
- **OR**
- [https://the_hostname_you_have_gitlab_deployed:your_gitlab_port/profile/applications](https://the_hostname_you_have_gitlab_deployed:your_gitlab_port/profile/applications)
-2. Choose a name for your application
-3. For the Redirect URI, enter the DefectDojo URL with the following
+2. Choose a name for your application
+3. For the Redirect URI, enter the DefectDojo URL with the following
format
- [https://the_hostname_you_have_dojo_deployed:your_server_port/complete/gitlab/](https://the_hostname_you_have_dojo_deployed:your_server_port/complete/gitlab/)
-4. Edit the settings (see [Configuration]({{< ref "/getting_started/configuration" >}})) with the following
+4. Edit the settings (see [Configuration]({{< ref "/getting_started/configuration" >}})) with the following
information:
{{< highlight python >}}
@@ -227,14 +261,93 @@ Follow along below.
DD_SOCIAL_AUTH_GITLAB_PROJECT_AUTO_IMPORT = True
{{< /highlight >}}
-5. Restart DefectDojo, and you should now see a **Login with Gitlab**
+ **Important:** if you enable this setting on an already-working instance with GitLab integrations, users will need to grant the new "read_repository" scope
+
+5. Restart DefectDojo, and you should now see a **Login with Gitlab**
button on the login page.
+## Keycloak
+There is also an option to use Keycloak as an OAuth2 provider to authenticate users to Defect Dojo, again by using
+the social-auth plugin.
+
+Here are suggestions on how to configure Keycloak and DefectDojo:
+
+### Configure Keycloak
+(assuming you already have an existing realm, otherwise create one)
+1. Navigate to your Keycloak realm and add a new client of type openid-connect. Choose a name for the client ID and use this value below for `DD_SOCIAL_AUTH_KEYCLOAK_KEY`.
+2. In the client settings:
+ * Set `access type` to `confidential`
+ * Under `valid Redirect URIs`, add the URI to your DefectDojo installation, e.g. `https://<your-dojo-host>/*`
+ * Under `web origins`, add the same (or '+')
+ * Under `Fine grained openID connect configuration` -> `user info signed response algorithm`: set to `RS256`
+ * Under `Fine grained openID connect configuration` -> `request object signature algorithm`: set to `RS256`
+ * Save these settings in Keycloak (hit the save button)
+3. Under `Scope` -> `Full Scope Allowed` set to `off`
+4. Under `mappers` -> add a custom mapper here:
+ * Name: `aud`
+ * Mapper type: `audience`
+ * Included audience: select your client/client-id here
+ * Add ID to token: `off`
+ * Add access to token: `on`
+5. Under `credentials`: copy the secret (and use as DD_SOCIAL_AUTH_KEYCLOAK_SECRET below)
+6. In your realm settings -> keys: copy the "Public key" (signing key) (use for DD_SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY below)
+7. In your realm settings -> general -> endpoints: look into openId endpoint configuration
+ and look up your authorization and token endpoint (use them below)
+
+### Configure Defect Dojo
+Edit the settings (see [Configuration]({{< ref "/getting_started/configuration" >}})) with the following
+ information:
+
+ {{< highlight python >}}
+ DD_SESSION_COOKIE_SECURE=True,
+ DD_CSRF_COOKIE_SECURE=True,
+ DD_SECURE_SSL_REDIRECT=True,
+ DD_SOCIAL_AUTH_KEYCLOAK_OAUTH2_ENABLED=True,
+ DD_SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY=(str, ''),
+ DD_SOCIAL_AUTH_KEYCLOAK_KEY=(str, ''),
+ DD_SOCIAL_AUTH_KEYCLOAK_SECRET=(str, ''),
+ DD_SOCIAL_AUTH_KEYCLOAK_AUTHORIZATION_URL=(str, ''),
+ DD_SOCIAL_AUTH_KEYCLOAK_ACCESS_TOKEN_URL=(str, '')
+ {{< /highlight >}}
+
+or, alternatively, for helm configuration, add this to the `extraConfig` section:
+
+```
+DD_SESSION_COOKIE_SECURE: 'True'
+DD_CSRF_COOKIE_SECURE: 'True'
+DD_SECURE_SSL_REDIRECT: 'True'
+DD_SOCIAL_AUTH_KEYCLOAK_OAUTH2_ENABLED: 'True'
+DD_SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY: ''
+DD_SOCIAL_AUTH_KEYCLOAK_KEY: ''
+DD_SOCIAL_AUTH_KEYCLOAK_SECRET: ''
+DD_SOCIAL_AUTH_KEYCLOAK_AUTHORIZATION_URL: ''
+DD_SOCIAL_AUTH_KEYCLOAK_ACCESS_TOKEN_URL: ''
+```
+
+Optionally, you *can* set `DD_SOCIAL_AUTH_KEYCLOAK_LOGIN_BUTTON_TEXT` in order to customize the login button's text caption.
+
+## GitHub Enterprise
+1. Navigate to your GitHub Enterprise Server and follow instructions to create a new OAuth App [https://docs.github.com/en/enterprise-server/developers/apps/building-oauth-apps/creating-an-oauth-app](https://docs.github.com/en/enterprise-server/developers/apps/building-oauth-apps/creating-an-oauth-app)
+2. Choose a name for your application
+3. For the Redirect URI, enter the DefectDojo URL with the following
+ format
+ - [https://the_hostname_you_have_dojo_deployed:your_server_port/complete/github-enterprise/](https://the_hostname_you_have_dojo_deployed:your_server_port/complete/github-enterprise/)
+4. Edit the settings (see [Configuration]({{< ref "/getting_started/configuration" >}})) with the following
+ information:
+ {{< highlight python >}}
+ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY=(str, 'GitHub Enterprise OAuth App Client ID'),
+ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET=(str, 'GitHub Enterprise OAuth App Client Secret'),
+ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_URL=(str, 'https://github.<your-company>.com/'),
+ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL=(str, 'https://github.<your-company>.com/api/v3/'),
+ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_OAUTH2_ENABLED = True,
+ {{< /highlight >}}
+5. Restart DefectDojo, and you should now see a **Login with GitHub Enterprise**
+ button on the login page.
+
## SAML 2.0
In a similar direction to OAuth, this SAML addition provides a more secure
perogative to SSO. For definitions of terms used and more information,
-see the plugin [plugin
-homepage](https://github.com/IdentityPython/djangosaml2).
+see the [plugin homepage](https://github.com/IdentityPython/djangosaml2).
1. Navigate to your SAML IdP and find your metadata
2. Edit the settings (see [Configuration]({{< ref "/getting_started/configuration" >}})) with the following
@@ -262,6 +375,8 @@ homepage](https://github.com/IdentityPython/djangosaml2).
NOTE: *DD_SAML2_ATTRIBUTES_MAP* in k8s can be referenced as extraConfig (e.g. `DD_SAML2_ATTRIBUTES_MAP: 'Email'='email', 'Username'='username'...`)
+NOTE: *DD_SITE_URL* might also need to be set, depending on how you provide the metadata.xml (file versus URL).
+
4. Checkout the SAML section in dojo/`dojo/settings/settings.dist.py` and verfiy if it fits your requirement. If you need help, take a look at the [plugin
documentation](https://djangosaml2.readthedocs.io/contents/setup.html#configuration).
@@ -299,6 +414,26 @@ Up to relase 1.15.0 the SAML integration was based on [https://github.com/fangli
* DD_SAML2_ATTRIBUTES_MAP: Syntax has changed
* DD_SAML2_CREATE_USER: Default value changed to False, to avoid security breaches
+## RemoteUser
+
+This implementation is suitable if the DefectDojo instance is placed behind an HTTP authentication proxy.
+Dojo expects the proxy to perform authentication and to pass HTTP requests to the Dojo instance with the relevant HTTP headers filled in.
+The proxy must ensure that an attacker cannot inject malicious HTTP headers and bypass authentication.
+
+Values which need to be set:
+
+* `DD_AUTH_REMOTEUSER_ENABLED` - Needs to be set to `True`
+* `DD_AUTH_REMOTEUSER_USERNAME_HEADER` - Name of the header which contains the username
+* `DD_AUTH_REMOTEUSER_EMAIL_HEADER`(optional) - Name of the header which contains the email
+* `DD_AUTH_REMOTEUSER_FIRSTNAME_HEADER`(optional) - Name of the header which contains the first name
+* `DD_AUTH_REMOTEUSER_LASTNAME_HEADER`(optional) - Name of the header which contains the last name
+* `DD_AUTH_REMOTEUSER_GROUPS_HEADER`(optional) - Name of the header which contains the comma-separated list of groups; user will be assigned to these groups (missing groups will be created)
+* `DD_AUTH_REMOTEUSER_GROUPS_CLEANUP`(optional) - Same as in the [AzureAD implementation](#automatic-import-of-user-groups)
+* `DD_AUTH_REMOTEUSER_TRUSTED_PROXY` - Comma-separated list of proxies; simple IP and CIDR formats are supported
+* `DD_AUTH_REMOTEUSER_LOGIN_ONLY`(optional) - Check [Django documentation](https://docs.djangoproject.com/en/3.2/howto/auth-remote-user/#using-remote-user-on-login-pages-only)
+
+*WARNING:* Spoofing of headers is possible (for all `DD_AUTH_REMOTEUSER_xxx_HEADER` values). Read the warning in the [Django documentation](https://docs.djangoproject.com/en/3.2/howto/auth-remote-user/#configuration)
+
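+As a minimal sketch, a proxy-backed setup might look like this (the header names and proxy range are illustrative, not prescribed by DefectDojo):
+
+ {{< highlight python >}}
+ DD_AUTH_REMOTEUSER_ENABLED=True,
+ DD_AUTH_REMOTEUSER_USERNAME_HEADER=(str, 'HTTP_X_FORWARDED_USER'),
+ DD_AUTH_REMOTEUSER_EMAIL_HEADER=(str, 'HTTP_X_FORWARDED_EMAIL'),
+ DD_AUTH_REMOTEUSER_TRUSTED_PROXY=(str, '10.0.0.0/24'),
+ {{< /highlight >}}
+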
## User Permissions
When a new user is created via the social-auth, only the default permissions are active. This means that the newly created user does not have access to add, edit, nor delete anything within DefectDojo. There are two parameters in the System Settings to influence the permissions for newly created users:
@@ -307,28 +442,27 @@ When a new user is created via the social-auth, only the default permissions are
When both the parameters `Default group` and `Default group role` are set, the new user will be a member of the given group with the given role, which will give him the respective permissions.
-### Staff user ###
-
-Newly created users are neither staff nor superuser by default. The `is_staff` flag of a new user will be set to `True`, if the user's email address matches the regular expression in the parameter `Email pattern for staff users`.
+### Groups from Identity Providers
-**Example:**
-
-`.*@example.com` will make `alice@example.com` a staff user, while `bob@partner.example.com` or `chris@example.org` will be non-staff users.
+Some Identity Providers are able to send the list of groups a user should belong to. This functionality is implemented only for the Identity Providers mentioned below. For all others, contributions are more than welcome (hint: the functions `assign_user_to_groups` and `cleanup_old_groups_for_user` from [`dojo/pipeline.py`](https://github.com/DefectDojo/django-DefectDojo/blob/master/dojo/pipeline.py) might be useful).
+- [Azure](#automatic-import-of-user-groups): Check `DD_SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_GET_GROUPS` and `DD_SOCIAL_AUTH_AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS`
+- [RemoteUser](#remoteuser): Check `DD_AUTH_REMOTEUSER_GROUPS_HEADER` and `DD_AUTH_REMOTEUSER_GROUPS_CLEANUP`
## Login speed-up
-If you are using only one Social authentication and you are not using the standard login mechanism (`SHOW_LOGIN_FORM` is
-set to `False`), showing login page could be useless because every time user clicks on the only existing button on the
-page like "Login with SAML" (or another similar button). If you set `SOCIAL_LOGIN_AUTO_REDIRECT` to `True`, the login
-page is skipped and the user is automatically redirected to the identity provider's page.
+You can bypass the login form if you are only using SSO/social authentication for logging in, by enabling these two environment variables:
+
+```
+DD_SOCIAL_LOGIN_AUTO_REDIRECT: "true"
+DD_SOCIAL_AUTH_SHOW_LOGIN_FORM: "false"
+```
### Login form fallback
If you are using "login speed-up", it can be useful to be able to login by the standard way, for example when an admin
-user needs to log in because of a change of some settings or permissions. Accessing
-[`/login?force_login_form`](https:///login?force_login_form) shows login form even "login speed-up" is
-enabled.
+user needs to log in because of a change of some settings or permissions. This feature is accessible by visiting the URL
+`/login?force_login_form`.
## Other Providers
diff --git a/docs/content/en/integrations/source-code-repositories.md b/docs/content/en/integrations/source-code-repositories.md
index b8a0f2fe19d..7b7f5f04671 100644
--- a/docs/content/en/integrations/source-code-repositories.md
+++ b/docs/content/en/integrations/source-code-repositories.md
@@ -7,18 +7,47 @@ weight: 5
Findings can have a filepath and a line number as the location of the vulnerability. This is typically set when scanning an application with a Static Application Security Test (SAST) tool. If the repository of the source code is specified in the Engagement, DefectDojo will present the filepath as a link and the user can navigate directly to the location of the vulnerability.
-## Setting the repository in the Engagement
+## Setting the repository in the Engagement and Test
-While editing the Engagement, users can set the URL of the repo. It needs to be the URL including the branch, e.g. https://github.com/DefectDojo/django-DefectDojo/tree/dev (GitHub) or https://gitlab.com/gitlab-org/gitlab/-/tree/master (GitLab).
+### Engagement
+While editing the Engagement, users can set the URL of the specific SCM repo.
+For an Interactive Engagement, it needs to be the URL including the branch:
+- for GitHub - like https://github.com/DefectDojo/django-DefectDojo/tree/dev
+![Edit Engagement (GitHub)](../../../static/images/source-code-repositories_1.png)
+- for GitLab - like https://gitlab.com/gitlab-org/gitlab/-/tree/master
+![Edit Engagement (Gitlab)](../../../static/images/source-code-repositories-gitlab_1.png)
+- for public BitBucket - like https://bitbucket.org/some-user/some-project.git (like git clone url)
+![Edit Engagement (Bitbucket public)](../../../static/images/source-code-repositories-bitbucket_1.png)
+- for standalone/onpremise BitBucket https://bb.example.com/scm/some-project/some-repo.git or https://bb.example.com/scm/some-user-name/some-repo.git for user public repo (like git clone url)
+![Edit Engagement (Bitbucket standalone)](../../../static/images/source-code-repositories-bitbucket-onpremise_1.png)
+
+For a CI/CD Engagement, where the user can set a commit hash, branch/tag and code line, the URL should look like the examples below:
+- for GitHub - like https://github.com/DefectDojo/django-DefectDojo
+- for GitLab - like https://gitlab.com/gitlab-org/gitlab
+- for public BitBucket - like https://bitbucket.org/some-user/some-project.git (like git clone url)
+- for standalone/onpremise BitBucket https://bb.example.com/scm/some-project.git or https://bb.example.com/scm/some-user-name/some-repo.git for user public repo (like git clone url)
+
+If the user does not set a commit hash or branch/tag in the appropriate fields of the CI/CD Engagement edit form, the URL should look like it does in the Interactive Engagement edit form.
+
+The SCM navigation URL is composed from the repo URL using the SCM type. GitHub/GitLab is the default SCM type, but the user can set a specific SCM type in the Product custom field "scm-type".
+
+Product custom fields:
+
+![Product custom fields](../../../static/images/product-custom-fields_1.png)
+
+Product SCM type add:
+
+![Product scm type](../../../static/images/product-scm-type_1.png)
+
+Possible SCM types are 'github', 'gitlab', 'bitbucket', 'bitbucket-standalone', or nothing (which defaults to GitHub).
-![Edit Engagement](../../images/source-code-repositories_1.png)
## Link in Finding
When viewing a finding, the location will be presented as a link, if the repository of the source code has been set in the Engagement:
-![Link to location](../../images/source-code-repositories_2.png)
+![Link to location](../../../static/images/source-code-repositories_2.png)
Clicking on this link will open a new tab in the browser, with the source file of the vulnerability at the corresponding line number:
-![View in repository](../../images/source-code-repositories_3.png)
+![View in repository](../../../static/images/source-code-repositories_3.png)
diff --git a/docs/content/en/usage/features.md b/docs/content/en/usage/features.md
index 3b9a9f74444..470c009bf71 100644
--- a/docs/content/en/usage/features.md
+++ b/docs/content/en/usage/features.md
@@ -1,14 +1,150 @@
---
title: "Features"
-description: "Various features help manage the findings."
+description: "Various features help manage vulnerabilities."
draft: false
weight: 2
---
+## Tags
+
+In DefectDojo, tags are first-class citizens, recognized as the facilitators
+of organization within each level of the [data model](../models). Tags are
+ideal for grouping objects in a manner that can be filtered into smaller, more
+digestible chunks.
+
+Here is an example of a product with two tags and four findings, each with a single tag:
+
+![High level example of usage with tags](../../images/tags-high-level-example.png)
+
+### Adding and Removing
+
+Tags can be managed in the following ways:
+
+1. Creating or Editing new objects
+
+ When a new object is created or edited through the UI or API, there is a field for specifying
+ the tags to be set on a given object. This field is a multiselect field that also has
+ auto completion to make searching and adding existing tags a breeze. Here is what the field
+ looks like on the product from the screenshot in the previous section:
+
+ ![Tag management on an object](../../images/tags-management-on-object.png)
+
+2. Import and Reimport
+
+ Tags can also be applied to a given test at the time of import or reimport. This is a very
+ handy use case when importing via the API with automation as it provides an opportunity to
+ append automation run details and tool information that may not be captured in the test
+ or finding object directly.
+
+ The field looks and behaves exactly as it does on a given object.
+
+3. Bulk Edit Menu (Findings only)
+
+ When needing to update many findings with the same set of tags, the bulk edit menu can be
+ used to ease the burden.
+
+ In the following example, let's say I want to update the two findings with the tag "tag-group-alpha" to have a new tag list: ["tag-group-charlie", "tag-group-delta"].
+ First I would select the tags to be updated:
+
+ ![Select findings for bulk edit tag update](../../images/tags-select-findings-for-bulk-edit.png)
+
+ Once a finding is selected, a new button appears with the name "Bulk Edit". Clicking this button
+ produces a dropdown menu with many options, but the focus is just on tags for now. Update the
+ field to have the desired tag list as follows, and click submit
+
+ ![Apply changes for bulk edit tag update](../../images/tags-bulk-edit-submit.png)
+
+ The tags on the selected Findings will be updated to whatever was specified in the tags field
+ within the bulk edit menu
+
+ ![Completed bulk edit tag update](../../images/tags-bulk-edit-complete.png)
+
+### Filtering
+
+Tags can be filtered in many ways through both the UI and the API. For example, here is a snippet
+of the Finding filters:
+
+![Snippet of the finding filters](../../images/tags-finding-filter-snippet.png)
+
+There are ten fields related to tags:
+
+ - Tags: filter on any tags that are attached to a given Finding
+ - Examples:
+ - Finding will be returned
+ - Finding Tags: ["A", "B", "C"]
+ - Filter Query: "B"
+ - Finding will *not* be returned
+ - Finding Tags: ["A", "B", "C"]
+ - Filter Query: "F"
+ - Not Tags: filter on any tags that are *not* attached to a given Finding
+ - Examples:
+ - Finding will be returned
+ - Finding Tags: ["A", "B", "C"]
+ - Filter Query: "F"
+ - Finding will *not* be returned
+ - Finding Tags: ["A", "B", "C"]
+ - Filter Query: "B"
+ - Tag Name Contains: filter on any tags that contain part or all of the query in the given Finding
+ - Examples:
+ - Finding will be returned
+ - Finding Tags: ["Alpha", "Beta", "Charlie"]
+ - Filter Query: "et" (part of "Beta")
+ - Finding will *not* be returned
+ - Finding Tags: ["Alpha", "Beta", "Charlie"]
+ - Filter Query: "meg" (part of "Omega")
+ - Not Tag Name Contains: filter on any tags that do *not* contain part or all of the query in the given Finding
+ - Examples:
+ - Finding will be returned
+ - Finding Tags: ["Alpha", "Beta", "Charlie"]
+ - Filter Query: "meg" (part of "Omega")
+ - Finding will *not* be returned
+ - Finding Tags: ["Alpha", "Beta", "Charlie"]
+ - Filter Query: "et" (part of "Beta")
+
+The other six tag filters follow the same rules as "Tags" and "Not Tags" above,
+but at different levels in the data model:
+
+ - Tags (Test): filter on any tags that are attached to the Test a given Finding is part of
+ - Not Tags (Test): filter on any tags that are *not* attached to the Test a given Finding is part of
+ - Tags (Engagement): filter on any tags that are attached to the Engagement a given Finding is part of
+ - Not Tags (Engagement): filter on any tags that are *not* attached to the Engagement a given Finding is part of
+ - Tags (Product): filter on any tags that are attached to the Product a given Finding is part of
+ - Not Tags (Product): filter on any tags that are *not* attached to the Product a given Finding is part of
+
+### Tag Inheritance
+
+When enabled, tags applied to a given product will automatically be applied to all objects under
+that product in the [data model](../models).
+
+#### Configuration
+
+Tag Inheritance can be enabled at the following scope levels:
+- Global Scope
+ - Every product system-wide will begin applying tags to all child objects
+ - This is set within the System Settings
+- Product Scope
+ - Only the selected product will begin applying tags to all child objects
+ - This is set at the product creation/edit page
+
+#### Behaviors
+
+Tags can be added to and removed from other objects just as when tag inheritance
+is disabled. The only exception to this rule is inherited tags, as they cannot
+be removed from an object. See the following example of adding a tag "test_only_tag"
+to the Test object and a tag "engagement_only_tag" to the Engagement.
+
+![Example of inherited tags](../../images/tags-inherit-exmaple.png)
+
+When updates are made to the tag list on a product, the same changes are made to all
+objects within the product asynchronously. The duration of this task directly correlates
+with the number of objects contained within the product. If the results are not observed
+within a reasonable time period, consult the celery worker logs to identify where any
+problems might have arisen.
+
## Risk Acceptance
Findings cannot always be remediated or addressed for various reasons. A
-finding status can change to accepted by doing the following. Findings
+finding's status can be changed to 'accepted' by doing the following: Findings
are accepted in the engagement view. To locate the engagement from the
finding click the link to engagement as shown below.
@@ -37,7 +173,7 @@ deduplication on engagement and deduplication on product level:
![Deduplication on product and engagement level](../../images/deduplication.png)
-Upon saving a finding, defectDojo will look at the other findings in the
+Upon saving a finding, DefectDojo will look at the other findings in the
product or the engagement (depending on the configuration) to find
duplicates
@@ -55,7 +191,7 @@ Deduplicate vulnerabilities in the same build/release. The vulnerabilities may b
detecting duplicates across scanners is not trivial as it
requires a certain standardization.
-Track unique vulnerabilities across builds/releases so that defectDojo knows when it finds a vulnerability whether it has seen it before.
+Track unique vulnerabilities across builds/releases so that DefectDojo knows, when it finds a vulnerability, whether it has seen it before.
: this allows you keep information attached to a given finding
in a unique place: all further duplicate findings will point
@@ -74,21 +210,26 @@ configured.
#### Engagement configuration
-When creating an engagement or later by editing the engagement, the
+When creating or editing an engagement, the
\"Deduplication within engagement only\" checkbox can be ticked.
- If activated: Findings are only deduplicated within the same
engagement. Findings present in different engagements cannot be
duplicates
-- Else: Findings are deduplicated across the whole product
+- Otherwise: Findings are deduplicated across the whole product
-Note that deduplication can never occur across different products.
+Note that currently deduplication does not occur across different products.
### Deduplication algorithms
The behavior of the deduplication can be configured for each parser in
settings.dist.py (or settings.py after install) by configuring the
-`DEDUPLICATION_ALGORITHM_PER_PARSER` variable.
+`DEDUPLICATION_ALGORITHM_PER_PARSER` variable, or via the environment variable (useful for Kubernetes deployments) `DD_DEDUPLICATION_ALGORITHM_PER_PARSER` with a JSON string like
+```json
+{"ScannerName":"algorithm"}
+```
+The environment variable overrides the settings in `settings.dist.py` for the matching keys.
+
The available algorithms are:
@@ -152,7 +293,11 @@ DEDUPE_ALGO_LEGACY
The hash_code computation can be configured for each parser using the
parameter `HASHCODE_FIELDS_PER_SCANNER` in
-`settings.dist.py`.
+`settings.dist.py`, or via the environment variable (useful for Kubernetes deployments) `DD_HASHCODE_FIELDS_PER_SCANNER` with a JSON string like
+```json
+{"ScannerName":["field1", "field2"]}
+```
+The environment variable overrides the settings in `settings.dist.py` for the matching keys.
The parameter `HASHCODE_ALLOWED_FIELDS` list the fields
from finding table that were tested and are known to be working when
@@ -195,7 +340,7 @@ Tips:
When you change the hashcode configuration, it is needed to regenerated the hashcodes for all findings,
or at least those findings found by scanners for which the configuration was updated.
-This is sometimes also needed after an upgrade to a new Defect Dojo version, for example when we made changes
+This is sometimes also needed after an upgrade to a new DefectDojo version, for example when we made changes
to the hashcode configuration or calculation logic. We will mention this in the upgrade notes.
To regenerate the hashcodes, use the `dedupe` management command:
@@ -206,7 +351,7 @@ docker-compose exec uwsgi ./manage.py dedupe --hash_code_only
This will only regenerated the hashcodes, but will not run any deduplication logic on existing findings.
If you want to run deduplication again on existing findings to make sure any duplicates found by the new
-hashcode config are marked as such, run
+hashcode config are marked as such, run:
{{< highlight bash >}}
docker-compose exec uwsgi ./manage.py dedupe
@@ -232,13 +377,17 @@ details about the deduplication process : switch
### Deduplication - APIv2 parameters
-- `skip_duplicates`: if true, duplicates are not
+- `skip_duplicates`: if true, duplicates are not
inserted at all
-- `close_old_findings` : if true, findings that are not
+- `close_old_findings` : if true, findings that are not
duplicates and that were in the previous scan of the same type
- (example ZAP) for the same product (or engagement in case of
- \"Deduplication on engagement\") and that are not present in the new
- scan are closed (Inactive, Verified, Mitigated)
+ (example ZAP) for the same engagement (or product in case of
+ \"close_old_findings_product_scope\") and that are not present in the new
+ scan are closed (Inactive, Verified, Mitigated).
+- `close_old_findings_product_scope`: if true, close_old_findings applies
+ to all findings of the same type in the product. Note that
+ \"Deduplication on engagement\" is no longer used to determine the
+ scope of close_old_findings.
### Deduplication / Similar findings
@@ -260,22 +409,10 @@ Similar Findings
which will remove the duplicate status on that finding along with
marking it active again.
-## False Positive Removal
-
-DefectDojo allows users to tune out false positives by enabling False
-Positive History. This will track what engineers have labeled as false
-positive for a specific product and for a specific scanner. While
-enabled, when a tool reports the same issue that has been flagged as a
-false positive previously, it will automatically mark the finding as a
-false positive, helping to tune overly verbose security tools.
-
-False Positive Removal is not needed when using deduplication, and it is
-advised to not combine these two.
-
## Service Level Agreement (SLA)
-DefectDojo allows you to maintain your security SLA and automatically
-remind teams whenever a SLA is about to get breached, or breaches.
+DefectDojo allows you to maintain your security SLAs and automatically
+remind teams whenever an SLA is about to be breached, or has been breached.
Simply indicate in the `System Settings` for each severity, how many
days teams have to remediate a finding.
@@ -284,25 +421,20 @@ days teams have to remediate a finding.
### SLA notification configuration
-There are 5 variables in the settings.py file that you can configure, to
-act on the global behavior. By default, any findings across the instance
-that are in `Active, Verified` state will be considered for
-notifications.
+There are 3 variables in the system settings that can be set for notifications of SLA breaches.
+By default, notifications are disabled.
+You can choose to notify about breaches either only for findings that are in 'Active' state, or
+for any findings across the instance that are in `Active, Verified`.
+Furthermore, it is possible to choose to only consider findings that have a JIRA issue linked to them.
+
+There are 2 variables in the settings.py file that you can configure to
+control the global behavior.
{{< highlight python >}}
-SLA_NOTIFY_ACTIVE = False
-SLA_NOTIFY_ACTIVE_VERIFIED_ONLY = True
-SLA_NOTIFY_WITH_JIRA_ONLY = False
SLA_NOTIFY_PRE_BREACH = 3
SLA_NOTIFY_POST_BREACH = 7
{{< / highlight >}}
-Setting both `SLA_NOTIFY_ACTIVE` and `SLA_NOTIFY_ACTIVE_VERIFIED_ONLY`
-to `False` will effectively disable SLA notifications.
-
-You can choose to only consider findings that have a JIRA issue linked
-to them. If so, please set `SLA_NOTIFY_WITH_JIRA_ONLY` to `True`.
-
The `SLA_NOTIFY_PRE_BREACH` is expressed in days. Whenever a finding\'s
\"SLA countdown\" (time to remediate) drops to this number, a
notification would be sent everyday, as scheduled by the crontab in
@@ -321,7 +453,7 @@ through CI in \'active\' state.
### What notification channels for SLA notifications?
-The same as usual. You will notice that an extra `SLA breach` option is now present
+You will notice that an extra `SLA breach` option is now present
on the `Notification` page and also in the `Product` view.
![SLA notification checkbox](../../images/sla_notification_product_checkboxes.png)
@@ -329,8 +461,8 @@ on the `Notification` page and also in the `Product` view.
### SLA notification with JIRA
You can choose to also send SLA notification as JIRA comments, if your
-product is configured with JIRA. You can enable it at the JIRA
-configuration level or at the Product level.
+product is configured with JIRA. You can enable this at the Product level, in the Product-specific
+JIRA settings.
The Product level JIRA notification configuration takes precendence over
the global JIRA notification configuration.
@@ -425,6 +557,9 @@ Product Type Counts
![Product Type Counts](../../images/met_2.png)
+Product Tag Counts
+: Same as above, but for a group of products sharing a tag.
+
Simple Metrics
: Provides tabular data for all Product Types. The data displayed in
this view is the total number of S0, S1, S2, S3, S4, Opened This
@@ -455,9 +590,6 @@ Active
: Designates whether this user should be treated as active and can login to DefectDojo.
Unselect this instead of deleting accounts.
-Staff status
-: Staff users have some more permissions than non-staff users, see [System wide permissions]({{< ref "permissions#system-wide-permissions" >}})
-
Superuser status
: Designates that this user can configure the system and has all permissions
for objects without explicitly assigning them.
@@ -541,4 +673,4 @@ feedback.internal.google.com (endpoint) -> [ team:human resources, public_facing
Endpoint Meta Importer can be found in the Endpoint tab when viewing a Product
-**Note:** The field "hostname" is required as it is used to query/create endpoints.
\ No newline at end of file
+**Note:** The field "hostname" is required as it is used to query/create endpoints.
diff --git a/docs/content/en/usage/models.md b/docs/content/en/usage/models.md
index b02c8c3b3bb..540e256cee2 100644
--- a/docs/content/en/usage/models.md
+++ b/docs/content/en/usage/models.md
@@ -1,6 +1,6 @@
---
title: "Core data classes"
-description: "DefectDojo is based on a model that allows high flexibility for your test tracking needs."
+description: "DefectDojo is made to be flexible to conform to your program, rather than making your team conform to the tool."
draft: false
weight: 1
---
diff --git a/docs/content/en/usage/performance.md b/docs/content/en/usage/performance.md
new file mode 100644
index 00000000000..d7957ddb724
--- /dev/null
+++ b/docs/content/en/usage/performance.md
@@ -0,0 +1,40 @@
+---
+title: "Performance Enhancements"
+description: "Settings to configure to enhance performance in DefectDojo"
+draft: false
+weight: 4
+---
+
+## Asynchronous Import
+
+DefectDojo offers an experimental feature to asynchronously import security reports.
+This feature works in most use cases, but struggles with operations such as pushing
+to Jira during the import process. Because Endpoints are still being processed and
+created even after the import procedure is completed, pushing Findings to Jira can
+result in incomplete Jira tickets. It is advised to wait until the import has fully
+completed (reached 100%) before pushing to Jira.
+
+To enable this feature, set `ASYNC_FINDING_IMPORT` to `True` in `local_settings.py`.
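+
+A minimal sketch of the corresponding `local_settings.py` entry (only the setting name comes from this page):
+
+```python
+# local_settings.py
+ASYNC_FINDING_IMPORT = True  # enable the experimental asynchronous import described above
+```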
+
+## Asynchronous Delete
+
+For larger instances, deleting an object can take minutes: all related objects have to be
+expanded into memory, rendered on the page, and then removed from the database.
+To combat this issue, two settings can be set in `local_settings.py`:
+
+#### ASYNC_OBJECT_DELETE
+
+Deleting an object asynchronously changes how the object is deleted under the hood. By removing
+the need to expand everything into memory, a lot of time (and memory) can be saved by offloading the
+lookups and removals onto celery processes. This process works by starting at the bottom of a given
+object's tree and walking upwards rather than downwards. This way, objects can be separated into
+buckets and then deleted.
+
+#### DELETE_PREVIEW
+
+Previewing all the objects to be deleted takes almost as much time as deleting the objects themselves.
+The preview is a safety feature intended to warn users about what they are about to delete, as well as to educate
+users on how the delete functionality works by cascade-deleting all related objects. With this enhancement enabled,
+the user will only see the following text in the delete preview (without any database lookups):
+
+`Previewing the relationships has been disabled.`
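+
+A sketch combining both settings in `local_settings.py` (assuming, per the descriptions above, that `DELETE_PREVIEW = False` is what disables the preview):
+
+```python
+# local_settings.py
+ASYNC_OBJECT_DELETE = True  # offload related-object lookups and removals to celery
+DELETE_PREVIEW = False      # show only the static message instead of enumerating objects
+```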
diff --git a/docs/content/en/usage/permissions.md b/docs/content/en/usage/permissions.md
index 5e13fc3891e..512484d51b2 100644
--- a/docs/content/en/usage/permissions.md
+++ b/docs/content/en/usage/permissions.md
@@ -7,8 +7,8 @@ draft: false
## System-wide permissions
-* Administrators (aka super users) have no limitations in the system. They can change all settings, manage users and have read and write access to all data.
-* Staff users can add Product Types, and have access to data according to their role in a Product or Product Type. There is the parameter `AUTHORIZATION_STAFF_OVERRIDE` in the settings to give all staff users full access to all Products and Product Types.
+* Administrators (aka superusers) have no limitations in the system. They can change all settings, manage users and have read / write access to all data.
+* Staff users can add Product Types, and have access to data according to their role in a Product or Product Type.
* Regular users have limited functionality available. They cannot add Product Types but have access to data according to their role in a Product or Product Type
## Product and Product Type permissions
@@ -36,14 +36,14 @@ Users can be assigned as members to Products and Product Types, giving them one
| Delete Product | | | | x | |
| | | | | | |
| View Engagement | x | x | x | x | x |
-| Add Engagement | | x | x | x | |
-| Edit Engagement | | x | x | x | |
+| Add Engagement | | x | x | x | x |
+| Edit Engagement | | x | x | x | x |
| Risk Acceptance | | x | x | x | |
| Delete Engagement | | | x | x | |
| | | | | | |
| View Test | x | x | x | x | x |
| Add Test | | x | x | x | |
-| Edit Test | | x | x | x | |
+| Edit Test | | x | x | x | x |
| Delete Test | | | x | x | |
| | | | | | |
| View Finding | x | x | x | x | x |
@@ -73,7 +73,7 @@ Users can be assigned as members to Products and Product Types, giving them one
| Delete Note | (x) 2) | (x) 2) | x | x | |
-1) Every staff user and administrator can add Product Types. Regular users are not allowed to add Product Types, unless they are Global Owner or Maintainer.
+1) Every superuser can add Product Types. Regular users are not allowed to add Product Types, unless they are a Global Owner or Maintainer.
2) Every user is allowed to edit and delete his own notes.
@@ -105,8 +105,23 @@ The membership of a group itself has a role that determines what permissions the
| Add Group member as Owner | | | x |
| Delete Group | | | x |
-1) Every staff user and administrator can add groups. Regular users are not allowed to add groups.
+1) Every superuser can add groups. Regular users are not allowed to add groups.
The permissions to manage the roles of Products and Product types for a group is defined by the role of the user in the respective Product or Product Type.
Groups can have a global role too. This global role gives all members of the group access to all Product Types and Products, including the underlying data, with permissions according to the respective role.
+
+## Configuration permissions
+
+Many configuration dialogues and API endpoints can be enabled for users or groups of users, regardless of their **superuser** status:
+
+![Configuration permissions](../../images/configuration_permissions.png)
+
+Three configurations can still only be changed by superusers:
+* System settings
+* Notifications on system level
+* Configuration permissions for users and groups
+
+{{% alert title="Warning" color="warning" %}}
+These configuration settings are a powerful tool and should be used with great care.
+{{% /alert %}}
diff --git a/docs/content/en/usage/productgrading.md b/docs/content/en/usage/productgrading.md
new file mode 100644
index 00000000000..88cb88267fa
--- /dev/null
+++ b/docs/content/en/usage/productgrading.md
@@ -0,0 +1,50 @@
+---
+title: "Product Health Grading"
+description: "Products are graded based on their health."
+draft: false
+weight: 2
+---
+
+## Product Health Grading
+
+Within DefectDojo's system settings, you can enable a grading system for your products by activating "Enable Product Grading". The products are then graded with the following possible grades:
+- Grade A
+- Grade B
+- Grade C
+- Grade D
+- Grade F
+
+The best grade is A, going down to the worst grade, F. By default, the grades follow the percentage thresholds of the US academic grade conversion described [here](https://en.wikipedia.org/wiki/Academic_grading_in_the_United_States).
+
+### Calculation of the grades
+The code that performs the grade calculations can be found [here](https://github.com/DefectDojo/django-DefectDojo/blob/76e11c21e88fb84b67b6da27c78fbbe1899e7e78/dojo/management/commands/system_settings.py#L8).
+
+The highest health score is 100, and it decreases based on the number of findings for each severity (critical, high, medium, low) within the product. The following code snippet shows the rules.
+Note that the following abbreviations are used:
+
+- crit: number of critical findings within the product
+- high: number of high findings within the product
+- med: number of medium findings within the product
+- low: number of low findings within the product
+
+```python
+def calculate_health(crit, high, med, low):
+    health = 100
+    if crit > 0:
+        health = 40
+        health = health - ((crit - 1) * 5)
+    if high > 0:
+        if health == 100:
+            health = 60
+        health = health - ((high - 1) * 3)
+    if med > 0:
+        if health == 100:
+            health = 80
+        health = health - ((med - 1) * 2)
+    if low > 0:
+        if health == 100:
+            health = 95
+        health = health - low
+    if health < 5:
+        health = 5
+    return health
+```
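+
+For example, under these rules a product with one critical, zero high, two medium, and five low findings scores 40 - 2 - 5 = 33 (the `calculate_health` wrapper is part of the sketch above, not the upstream code):
+
+```python
+calculate_health(crit=1, high=0, med=2, low=5)  # -> 33
+```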
\ No newline at end of file
diff --git a/docs/content/en/usage/questionnaires.md b/docs/content/en/usage/questionnaires.md
new file mode 100644
index 00000000000..9d862e64b7d
--- /dev/null
+++ b/docs/content/en/usage/questionnaires.md
@@ -0,0 +1,125 @@
+---
+title: "Questionnaires"
+description: "Collect information from people internal or external to DefectDojo."
+weight: 3
+draft: false
+---
+
+## Questionnaires
+
+Questionnaires provide a means for collecting information from developers and respective stakeholders. DefectDojo includes functionality to create new questionnaires with custom questions, open questionnaires to receive responses for certain time periods from insiders or outsiders, and connect questionnaires with new or existing engagements.
+
+## Creating a New Questionnaire
+
+To access, create, or modify new/existing questionnaires, navigate to the _All Questionnaires_ dashboard from the sidebar.
+
+![Questionnaires Location](../../images/questionnaires-sidebar.png)
+
+On the questionnaire dashboard, all existing questionnaires are displayed. To quickly find a questionnaire, the filters may be used to search for snippets within the questionnaire name and/or description, as well as by active/inactive status.
+
+When questionnaires are open for responses, they will be displayed in the _General Questionnaires_ block towards the bottom of the page.
+
+To begin the process of creating a new questionnaire, select the _Create Questionnaire_ button located in the top right of the questionnaire dashboard.
+
+![Questionnaires Home View](../../images/questionnaires-main-view.png)
+
+Questionnaires have a name and description, as well as an activity status, which are initially set on questionnaire creation, but can be modified in the future if necessary. Once these fields are filled in appropriately, the user can create the questionnaire without any questions (by selecting _Create Questionnaire_), or with questions (by selecting _Create Questionnaire and Add Questions_).
+
+![Create New Questionnaire](../../images/questionnaires-create-new.png)
+
+To add questions to a questionnaire, select the dropdown titled _Select as many Questions as applicable_, which will open all of the existing questions within DefectDojo. Once the desired questions are selected from the list, the dropdown can be closed, and the _Update Questionnaire Questions_ can be selected to save the newly created questionnaire.
+
+_Note_: New questions may also be added at the time of questionnaire creation by selecting the plus located next to the questions dropdown.
+
+![Select Questions](../../images/questionnaires-select-questions.png)
+
+## Creating New Questions
+
+The questions dashboard displays all of the questions that may exist as part of questionnaires within DefectDojo. Similar to questionnaires, to quickly find a question, the filters may be used to search for optional status, or snippets within the question name and/or description. Two types of questions exist within DefectDojo questionnaires: _Text Questions_ and _Multiple Choice Questions_. To add a new question, select the _Create Question_ button located in the top right of the questions dashboard.
+
+![Questionnaire Questions](../../images/questionnaires-questions.png)
+
+#### Adding Text Questions
+
+To add a text question (open-ended), fill out the add question form, where:
+ - **Type** - The type of question being created, in this case _Text_.
+ - **Order** - The order of a question describes its position in a questionnaire relative to other questions (e.g., an order of _1_ will put the question higher than a question with order _4_).
+ - **Optional** - When the optional box is checked, a question will not be required in a questionnaire.
+ - **Question Text** - The text that is displayed to prompt a user for their answer (e.g. What is your favorite color?).
+
+![Add Text Answer Question](../../images/questionnaires-open-ended.png)
+
+#### Adding Multiple Choice Questions
+
+Similar to the process of adding a text question, choice questions (non-open-ended) allow the user to pick from a given list of choices. To add a choice question, fill out the add question form, where:
+ - **Type** - The type of question being created, in this case _Choice_.
+ - **Order** - The order of a question describes its position in a questionnaire relative to other questions (e.g., an order of _1_ will put the question higher than a question with order _4_).
+ - **Optional** - When the optional box is checked, a question will not be required in a questionnaire.
+ - **Multichoice** - When the multichoice box is checked, multiple choices from the list of choices may be selected by the user.
+ - **Answer Choices** - The possible answer choices that may be selected by a user.
+
+![Add Multiple Choice Question](../../images/questionnaires-multiple-choice.png)
+
+## Publishing a Questionnaire
+
+Once a questionnaire has been successfully created, it can be published to accept responses. To publish a questionnaire, select the plus located to the right of _General Questionnaires_.
+
+![Add General Questionnaire](../../images/questionnaires-main-view.png)
+
+This will prompt for a specific questionnaire to be selected, as well as the date on which the questionnaire's response window should close. The response window sets a due date for recipients. Once these two options have been set, publish the questionnaire by selecting _Add Questionnaire_.
+
+![Publicize Questionnaire](../../images/questionnaires-publicize.png)
+
+Once a questionnaire is published, a link to share it can be retrieved by selecting the _Share Questionnaire_ action. To ensure the newly created questionnaire has been constructed as expected, open the share link and view the newly created questionnaire.
+
+![Share Questionnaire Link](../../images/questionnaires-share.png)
+
+![Responding to Questionnaires](../../images/questionnaires-respond.png)
+
+## Unassigned Questionnaires
+
+When a questionnaire's response window has closed, all of the responses will be saved, and the questionnaire will be listed as an _Unassigned Answered Engagement Questionnaire_ on the DefectDojo dashboard.
+
+There are three actions that may be taken when a questionnaire's response window has closed: _View Responses_, _Create Engagement_, and _Assign User_.
+
+![Unassigned Questionnaires](../../images/questionnaires-unassigned.png)
+
+#### View Questionnaire Responses
+
+To view the questionnaire responses, select the _View Responses_ action. All of the responses from the questionnaire will be displayed.
+
+![View Questionnaire Responses](../../images/questionnaires-view-responses.png)
+
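+Responses can also be pulled programmatically. A minimal sketch, under the same assumption that the read-only questionnaire endpoints are available in your DefectDojo version (host and API key are placeholders):
+
+```python
+import requests
+
+DD_BASE_URL = "https://defectdojo.example.com"  # placeholder host
+HEADERS = {"Authorization": "Token <your-api-key>"}  # placeholder API key
+
+# Answered questionnaires are exposed as a read-only list endpoint; the
+# individual answers live under /api/v2/questionnaire_answers/.
+resp = requests.get(
+    f"{DD_BASE_URL}/api/v2/questionnaire_answered_questionnaires/",
+    headers=HEADERS,
+)
+resp.raise_for_status()
+for answered in resp.json()["results"]:
+    print(answered)
+```
+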
+#### Create an Engagement From a Questionnaire
+
+To link the questionnaire to a product via an engagement, select the _Create Engagement_ action. Once a product is selected from the dropdown, select _Create Engagement_. This will link the questionnaire results with a new engagement under the selected product, which can then be given specific details similar to other engagements in DefectDojo, such as _Description_, _Version_, _Status_, _Tags_, etc.
+
+![Link Questionnaire to Engagement](../../images/questionnaires-new-engagement.png)
+
+![New Engagement for Questionnaire](../../images/questionnaires-create-engagement.png)
+
+To view a questionnaire at the engagement level, navigate to the engagement linked with the desired questionnaire. Expand the _Additional Features_ menu to reveal a _Questionnaires_ dropdown, which will contain all of the linked questionnaires.
+
+![View Questionnaire from Engagement](../../images/questionnaires-view-questionnaire.png)
+
+#### Assign a Questionnaire to a User
+
+To assign a questionnaire to a user, select the _Assign User_ action. This will prompt for a user to be selected from the dropdown of available users. Once a user is selected, assign the questionnaire to the specified user by selecting _Assign Questionnaire_.
+
+![Assign Questionnaire to User](../../images/questionnaires-assign-user.png)
+
+## Creating Questionnaires From Engagements
+
+While questionnaires are commonly created from the questionnaire dashboard, they can also be created at the engagement level. To create a new questionnaire from within an engagement, expand the _Additional Features_ dropdown to reveal the _Questionnaires_ dropdown. In the right-side header of the _Questionnaires_ dropdown, select the plus to link a new questionnaire.
+
+![New Questionnaire from Engagement](../../images/questionnaires-add-from-engagement.png)
+
+Once prompted, select a questionnaire from the available surveys list to link it with the engagement. If the user wishes to leave a response at the time of linking, the _Add Questionnaire and Respond_ option may be selected. To simply link the questionnaire with the engagement, select _Add Questionnaire_.
+
+![Select Questionnaire from Engagement](../../images/questionnaires-select-survey.png)
+
+## Anonymous Questionnaires
+
+Questionnaires, by default, are only accessible by DefectDojo users. To allow outside responses to DefectDojo questionnaires, ensure the _Allow Anonymous Survey Responses_ option within the _System Settings_ is selected. To share a questionnaire with anonymous users, use the questionnaire's _Share Link_.
+
+![Anonymous Survey Responses](../../images/questionnaires-system-settings.png)
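+
+This setting can also be toggled through the API's `system_settings` endpoint. A minimal sketch, assuming token authentication; the exact field name for this option varies and should be confirmed against `/api/v2/oa3/schema/` before relying on it:
+
+```python
+import requests
+
+DD_BASE_URL = "https://defectdojo.example.com"  # placeholder host
+HEADERS = {"Authorization": "Token <your-api-key>"}  # placeholder API key
+
+# There is a single system-settings object; fetch its id, then PATCH it.
+resp = requests.get(f"{DD_BASE_URL}/api/v2/system_settings/", headers=HEADERS)
+resp.raise_for_status()
+settings_id = resp.json()["results"][0]["id"]
+
+# NOTE: assumed field name (historically misspelled in some versions);
+# verify it in the API schema for your deployment.
+patch = requests.patch(
+    f"{DD_BASE_URL}/api/v2/system_settings/{settings_id}/",
+    headers=HEADERS,
+    json={"allow_anonymous_survey_repsonses": True},
+)
+patch.raise_for_status()
+```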
diff --git a/docs/package-lock.json b/docs/package-lock.json
index ccfdc3c5da2..eaf7096132b 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -1,6 +1,1005 @@
{
+ "name": "docs",
+ "lockfileVersion": 2,
"requires": true,
- "lockfileVersion": 1,
+ "packages": {
+ "": {
+ "devDependencies": {
+ "autoprefixer": "10.4.17",
+ "postcss": "8.4.35",
+ "postcss-cli": "11.0.0"
+ }
+ },
+ "node_modules/@nodelib/fs.scandir": {
+ "version": "2.1.5",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
+ "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.stat": "2.0.5",
+ "run-parallel": "^1.1.9"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@nodelib/fs.stat": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
+ "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
+ "dev": true,
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@nodelib/fs.walk": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
+ "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.scandir": "2.1.5",
+ "fastq": "^1.6.0"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@sindresorhus/merge-streams": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-1.0.0.tgz",
+ "integrity": "sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==",
+ "dev": true,
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/anymatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz",
+ "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==",
+ "dev": true,
+ "dependencies": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/autoprefixer": {
+ "version": "10.4.17",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz",
+ "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/autoprefixer"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "browserslist": "^4.22.2",
+ "caniuse-lite": "^1.0.30001578",
+ "fraction.js": "^4.3.7",
+ "normalize-range": "^0.1.2",
+ "picocolors": "^1.0.0",
+ "postcss-value-parser": "^4.2.0"
+ },
+ "bin": {
+ "autoprefixer": "bin/autoprefixer"
+ },
+ "engines": {
+ "node": "^10 || ^12 || >=14"
+ },
+ "peerDependencies": {
+ "postcss": "^8.1.0"
+ }
+ },
+ "node_modules/binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "dependencies": {
+ "fill-range": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/browserslist": {
+ "version": "4.22.2",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.2.tgz",
+ "integrity": "sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "caniuse-lite": "^1.0.30001565",
+ "electron-to-chromium": "^1.4.601",
+ "node-releases": "^2.0.14",
+ "update-browserslist-db": "^1.0.13"
+ },
+ "bin": {
+ "browserslist": "cli.js"
+ },
+ "engines": {
+ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
+ }
+ },
+ "node_modules/caniuse-lite": {
+ "version": "1.0.30001578",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001578.tgz",
+ "integrity": "sha512-J/jkFgsQ3NEl4w2lCoM9ZPxrD+FoBNJ7uJUpGVjIg/j0OwJosWM36EPDv+Yyi0V4twBk9pPmlFS+PLykgEvUmg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ]
+ },
+ "node_modules/chokidar": {
+ "version": "3.5.3",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
+ "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://paulmillr.com/funding/"
+ }
+ ],
+ "dependencies": {
+ "anymatch": "~3.1.2",
+ "braces": "~3.0.2",
+ "glob-parent": "~5.1.2",
+ "is-binary-path": "~2.1.0",
+ "is-glob": "~4.0.1",
+ "normalize-path": "~3.0.0",
+ "readdirp": "~3.6.0"
+ },
+ "engines": {
+ "node": ">= 8.10.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.2"
+ }
+ },
+ "node_modules/cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "dependencies": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "node_modules/dependency-graph": {
+ "version": "0.11.0",
+ "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-0.11.0.tgz",
+ "integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==",
+ "dev": true,
+ "engines": {
+ "node": ">= 0.6.0"
+ }
+ },
+ "node_modules/electron-to-chromium": {
+ "version": "1.4.635",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.635.tgz",
+ "integrity": "sha512-iu/2D0zolKU3iDGXXxdOzNf72Jnokn+K1IN6Kk4iV6l1Tr2g/qy+mvmtfAiBwZe5S3aB5r92vp+zSZ69scYRrg==",
+ "dev": true
+ },
+ "node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "node_modules/escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/fast-glob": {
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
+ "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.stat": "^2.0.2",
+ "@nodelib/fs.walk": "^1.2.3",
+ "glob-parent": "^5.1.2",
+ "merge2": "^1.3.0",
+ "micromatch": "^4.0.4"
+ },
+ "engines": {
+ "node": ">=8.6.0"
+ }
+ },
+ "node_modules/fastq": {
+ "version": "1.17.0",
+ "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.0.tgz",
+ "integrity": "sha512-zGygtijUMT7jnk3h26kUms3BkSDp4IfIKjmnqI2tvx6nuBfiF1UqOxbnLfzdv+apBy+53oaImsKtMw/xYbW+1w==",
+ "dev": true,
+ "dependencies": {
+ "reusify": "^1.0.4"
+ }
+ },
+ "node_modules/fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/fraction.js": {
+ "version": "4.3.7",
+ "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz",
+ "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==",
+ "dev": true,
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "type": "patreon",
+ "url": "https://github.com/sponsors/rawify"
+ }
+ },
+ "node_modules/fs-extra": {
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.0.0.tgz",
+ "integrity": "sha512-4YxRvMi4P5C3WQTvdRfrv5UVqbISpqjORFQAW5QPiKAauaxNCwrEdIi6pG3tDFhKKpMen+enEhHIzB/tvIO+/w==",
+ "dev": true,
+ "dependencies": {
+ "graceful-fs": "^4.2.0",
+ "jsonfile": "^6.0.1",
+ "universalify": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=14.14"
+ }
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
+ "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true,
+ "engines": {
+ "node": "6.* || 8.* || >= 10.*"
+ }
+ },
+ "node_modules/get-stdin": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz",
+ "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==",
+ "dev": true,
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "dependencies": {
+ "is-glob": "^4.0.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/globby": {
+ "version": "14.0.0",
+ "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.0.tgz",
+ "integrity": "sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==",
+ "dev": true,
+ "dependencies": {
+ "@sindresorhus/merge-streams": "^1.0.0",
+ "fast-glob": "^3.3.2",
+ "ignore": "^5.2.4",
+ "path-type": "^5.0.0",
+ "slash": "^5.1.0",
+ "unicorn-magic": "^0.1.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/graceful-fs": {
+ "version": "4.2.10",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
+ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
+ "dev": true
+ },
+ "node_modules/ignore": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz",
+ "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==",
+ "dev": true,
+ "engines": {
+ "node": ">= 4"
+ }
+ },
+ "node_modules/is-binary-path": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+ "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+ "dev": true,
+ "dependencies": {
+ "binary-extensions": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dev": true,
+ "dependencies": {
+ "is-extglob": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/jsonfile": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
+ "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
+ "dev": true,
+ "dependencies": {
+ "universalify": "^2.0.0"
+ },
+ "optionalDependencies": {
+ "graceful-fs": "^4.1.6"
+ }
+ },
+ "node_modules/lilconfig": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz",
+ "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==",
+ "dev": true,
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/merge2": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
+ "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
+ "dev": true,
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.5",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
+ "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+ "dev": true,
+ "dependencies": {
+ "braces": "^3.0.2",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/nanoid": {
+ "version": "3.3.7",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
+ "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "bin": {
+ "nanoid": "bin/nanoid.cjs"
+ },
+ "engines": {
+ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
+ }
+ },
+ "node_modules/node-releases": {
+ "version": "2.0.14",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
+ "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==",
+ "dev": true
+ },
+ "node_modules/normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/normalize-range": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz",
+ "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/path-type": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz",
+ "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==",
+ "dev": true,
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/picocolors": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
+ "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
+ "dev": true
+ },
+ "node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/pify": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
+ "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/postcss": {
+ "version": "8.4.35",
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
+ "integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/postcss"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "nanoid": "^3.3.7",
+ "picocolors": "^1.0.0",
+ "source-map-js": "^1.0.2"
+ },
+ "engines": {
+ "node": "^10 || ^12 || >=14"
+ }
+ },
+ "node_modules/postcss-cli": {
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-11.0.0.tgz",
+ "integrity": "sha512-xMITAI7M0u1yolVcXJ9XTZiO9aO49mcoKQy6pCDFdMh9kGqhzLVpWxeD/32M/QBmkhcGypZFFOLNLmIW4Pg4RA==",
+ "dev": true,
+ "dependencies": {
+ "chokidar": "^3.3.0",
+ "dependency-graph": "^0.11.0",
+ "fs-extra": "^11.0.0",
+ "get-stdin": "^9.0.0",
+ "globby": "^14.0.0",
+ "picocolors": "^1.0.0",
+ "postcss-load-config": "^5.0.0",
+ "postcss-reporter": "^7.0.0",
+ "pretty-hrtime": "^1.0.3",
+ "read-cache": "^1.0.0",
+ "slash": "^5.0.0",
+ "yargs": "^17.0.0"
+ },
+ "bin": {
+ "postcss": "index.js"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.0.0"
+ }
+ },
+ "node_modules/postcss-load-config": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-5.0.2.tgz",
+ "integrity": "sha512-Q8QR3FYbqOKa0bnC1UQ2bFq9/ulHX5Bi34muzitMr8aDtUelO5xKeJEYC/5smE0jNE9zdB/NBnOwXKexELbRlw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "lilconfig": "^3.0.0",
+ "yaml": "^2.3.4"
+ },
+ "engines": {
+ "node": ">= 18"
+ },
+ "peerDependencies": {
+ "jiti": ">=1.21.0",
+ "postcss": ">=8.0.9"
+ },
+ "peerDependenciesMeta": {
+ "jiti": {
+ "optional": true
+ },
+ "postcss": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/postcss-reporter": {
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.0.5.tgz",
+ "integrity": "sha512-glWg7VZBilooZGOFPhN9msJ3FQs19Hie7l5a/eE6WglzYqVeH3ong3ShFcp9kDWJT1g2Y/wd59cocf9XxBtkWA==",
+ "dev": true,
+ "dependencies": {
+ "picocolors": "^1.0.0",
+ "thenby": "^1.3.4"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ "peerDependencies": {
+ "postcss": "^8.1.0"
+ }
+ },
+ "node_modules/postcss-value-parser": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz",
+ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==",
+ "dev": true
+ },
+ "node_modules/pretty-hrtime": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz",
+ "integrity": "sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==",
+ "dev": true,
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/queue-microtask": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
+ "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/read-cache": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
+ "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==",
+ "dev": true,
+ "dependencies": {
+ "pify": "^2.3.0"
+ }
+ },
+ "node_modules/readdirp": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+ "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
+ "dev": true,
+ "dependencies": {
+ "picomatch": "^2.2.1"
+ },
+ "engines": {
+ "node": ">=8.10.0"
+ }
+ },
+ "node_modules/require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/reusify": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
+ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
+ "dev": true,
+ "engines": {
+ "iojs": ">=1.0.0",
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/run-parallel": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
+ "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ],
+ "dependencies": {
+ "queue-microtask": "^1.2.2"
+ }
+ },
+ "node_modules/slash": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
+ "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==",
+ "dev": true,
+ "engines": {
+ "node": ">=14.16"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/source-map-js": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
+ "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/thenby": {
+ "version": "1.3.4",
+ "resolved": "https://registry.npmjs.org/thenby/-/thenby-1.3.4.tgz",
+ "integrity": "sha512-89Gi5raiWA3QZ4b2ePcEwswC3me9JIg+ToSgtE0JWeCynLnLxNr/f9G+xfo9K+Oj4AFdom8YNJjibIARTJmapQ==",
+ "dev": true
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/unicorn-magic": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
+ "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/universalify": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
+ "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
+ "dev": true,
+ "engines": {
+ "node": ">= 10.0.0"
+ }
+ },
+ "node_modules/update-browserslist-db": {
+ "version": "1.0.13",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz",
+ "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "escalade": "^3.1.1",
+ "picocolors": "^1.0.0"
+ },
+ "bin": {
+ "update-browserslist-db": "cli.js"
+ },
+ "peerDependencies": {
+ "browserslist": ">= 4.21.0"
+ }
+ },
+ "node_modules/wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yaml": {
+ "version": "2.3.4",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
+ "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
+ "dev": true,
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/yargs": {
+ "version": "17.5.1",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz",
+ "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==",
+ "dev": true,
+ "dependencies": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.3",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^21.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/yargs-parser": {
+ "version": "21.0.1",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz",
+ "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==",
+ "dev": true,
+ "engines": {
+ "node": ">=12"
+ }
+ }
+ },
"dependencies": {
"@nodelib/fs.scandir": {
"version": "2.1.5",
@@ -28,6 +1027,12 @@
"fastq": "^1.6.0"
}
},
+ "@sindresorhus/merge-streams": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-1.0.0.tgz",
+ "integrity": "sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==",
+ "dev": true
+ },
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -53,21 +1058,15 @@
"picomatch": "^2.0.4"
}
},
- "array-union": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz",
- "integrity": "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==",
- "dev": true
- },
"autoprefixer": {
- "version": "10.4.1",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.1.tgz",
- "integrity": "sha512-B3ZEG7wtzXDRCEFsan7HmR2AeNsxdJB0+sEC0Hc5/c2NbhJqPwuZm+tn233GBVw82L+6CtD6IPSfVruwKjfV3A==",
+ "version": "10.4.17",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz",
+ "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==",
"dev": true,
"requires": {
- "browserslist": "^4.19.1",
- "caniuse-lite": "^1.0.30001294",
- "fraction.js": "^4.1.2",
+ "browserslist": "^4.22.2",
+ "caniuse-lite": "^1.0.30001578",
+ "fraction.js": "^4.3.7",
"normalize-range": "^0.1.2",
"picocolors": "^1.0.0",
"postcss-value-parser": "^4.2.0"
@@ -89,28 +1088,27 @@
}
},
"browserslist": {
- "version": "4.19.1",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz",
- "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==",
+ "version": "4.22.2",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.2.tgz",
+ "integrity": "sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==",
"dev": true,
"requires": {
- "caniuse-lite": "^1.0.30001286",
- "electron-to-chromium": "^1.4.17",
- "escalade": "^3.1.1",
- "node-releases": "^2.0.1",
- "picocolors": "^1.0.0"
+ "caniuse-lite": "^1.0.30001565",
+ "electron-to-chromium": "^1.4.601",
+ "node-releases": "^2.0.14",
+ "update-browserslist-db": "^1.0.13"
}
},
"caniuse-lite": {
- "version": "1.0.30001294",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001294.tgz",
- "integrity": "sha512-LiMlrs1nSKZ8qkNhpUf5KD0Al1KCBE3zaT7OLOwEkagXMEDij98SiOovn9wxVGQpklk9vVC/pUSqgYmkmKOS8g==",
+ "version": "1.0.30001578",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001578.tgz",
+ "integrity": "sha512-J/jkFgsQ3NEl4w2lCoM9ZPxrD+FoBNJ7uJUpGVjIg/j0OwJosWM36EPDv+Yyi0V4twBk9pPmlFS+PLykgEvUmg==",
"dev": true
},
"chokidar": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz",
- "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==",
+ "version": "3.5.3",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
+ "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
"dev": true,
"requires": {
"anymatch": "~3.1.2",
@@ -155,19 +1153,10 @@
"integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==",
"dev": true
},
- "dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "requires": {
- "path-type": "^4.0.0"
- }
- },
"electron-to-chromium": {
- "version": "1.4.29",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.29.tgz",
- "integrity": "sha512-N2Jbwxo5Rum8G2YXeUxycs1sv4Qme/ry71HG73bv8BvZl+I/4JtRgK/En+ST/Wh/yF1fqvVCY4jZBgMxnhjtBA==",
+ "version": "1.4.635",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.635.tgz",
+ "integrity": "sha512-iu/2D0zolKU3iDGXXxdOzNf72Jnokn+K1IN6Kk4iV6l1Tr2g/qy+mvmtfAiBwZe5S3aB5r92vp+zSZ69scYRrg==",
"dev": true
},
"emoji-regex": {
@@ -183,9 +1172,9 @@
"dev": true
},
"fast-glob": {
- "version": "3.2.7",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz",
- "integrity": "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==",
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
+ "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dev": true,
"requires": {
"@nodelib/fs.stat": "^2.0.2",
@@ -196,9 +1185,9 @@
}
},
"fastq": {
- "version": "1.13.0",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz",
- "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==",
+ "version": "1.17.0",
+ "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.0.tgz",
+ "integrity": "sha512-zGygtijUMT7jnk3h26kUms3BkSDp4IfIKjmnqI2tvx6nuBfiF1UqOxbnLfzdv+apBy+53oaImsKtMw/xYbW+1w==",
"dev": true,
"requires": {
"reusify": "^1.0.4"
@@ -214,15 +1203,15 @@
}
},
"fraction.js": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.1.2.tgz",
- "integrity": "sha512-o2RiJQ6DZaR/5+Si0qJUIy637QMRudSi9kU/FFzx9EZazrIdnBgpU+3sEWCxAVhH2RtxW2Oz+T4p2o8uOPVcgA==",
+ "version": "4.3.7",
+ "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz",
+ "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==",
"dev": true
},
"fs-extra": {
- "version": "10.0.0",
- "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz",
- "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==",
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.0.0.tgz",
+ "integrity": "sha512-4YxRvMi4P5C3WQTvdRfrv5UVqbISpqjORFQAW5QPiKAauaxNCwrEdIi6pG3tDFhKKpMen+enEhHIzB/tvIO+/w==",
"dev": true,
"requires": {
"graceful-fs": "^4.2.0",
@@ -259,49 +1248,31 @@
}
},
"globby": {
- "version": "12.0.2",
- "resolved": "https://registry.npmjs.org/globby/-/globby-12.0.2.tgz",
- "integrity": "sha512-lAsmb/5Lww4r7MM9nCCliDZVIKbZTavrsunAsHLr9oHthrZP1qi7/gAnHOsUs9bLvEt2vKVJhHmxuL7QbDuPdQ==",
+ "version": "14.0.0",
+ "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.0.tgz",
+ "integrity": "sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==",
"dev": true,
"requires": {
- "array-union": "^3.0.1",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.7",
- "ignore": "^5.1.8",
- "merge2": "^1.4.1",
- "slash": "^4.0.0"
+ "@sindresorhus/merge-streams": "^1.0.0",
+ "fast-glob": "^3.3.2",
+ "ignore": "^5.2.4",
+ "path-type": "^5.0.0",
+ "slash": "^5.1.0",
+ "unicorn-magic": "^0.1.0"
}
},
"graceful-fs": {
- "version": "4.2.8",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
- "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==",
+ "version": "4.2.10",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
+ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
"dev": true
},
"ignore": {
- "version": "5.1.9",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.9.tgz",
- "integrity": "sha512-2zeMQpbKz5dhZ9IwL0gbxSW5w0NK/MSAMtNuhgIHEPmaU3vPdKPL0UdvUCXs5SS4JAwsBxysK5sFMW8ocFiVjQ==",
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz",
+ "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==",
"dev": true
},
- "import-cwd": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/import-cwd/-/import-cwd-3.0.0.tgz",
- "integrity": "sha512-4pnzH16plW+hgvRECbDWpQl3cqtvSofHWh44met7ESfZ8UZOWWddm8hEyDTqREJ9RbYHY8gi8DqmaelApoOGMg==",
- "dev": true,
- "requires": {
- "import-from": "^3.0.0"
- }
- },
- "import-from": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/import-from/-/import-from-3.0.0.tgz",
- "integrity": "sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ==",
- "dev": true,
- "requires": {
- "resolve-from": "^5.0.0"
- }
- },
"is-binary-path": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
@@ -314,7 +1285,7 @@
"is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true
},
"is-fullwidth-code-point": {
@@ -349,39 +1320,9 @@
}
},
"lilconfig": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.4.tgz",
- "integrity": "sha512-bfTIN7lEsiooCocSISTWXkiWJkRqtL9wYtYy+8EK3Y41qh3mpwPU0ycTOgjdY9ErwXCc8QyrQp82bdL0Xkm9yA==",
- "dev": true
- },
- "lodash.difference": {
- "version": "4.5.0",
- "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz",
- "integrity": "sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=",
- "dev": true
- },
- "lodash.forown": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/lodash.forown/-/lodash.forown-4.4.0.tgz",
- "integrity": "sha1-hRFc8E9z75ZuztUlEdOJPMRmg68=",
- "dev": true
- },
- "lodash.get": {
- "version": "4.4.2",
- "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
- "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=",
- "dev": true
- },
- "lodash.groupby": {
- "version": "4.6.0",
- "resolved": "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz",
- "integrity": "sha1-Cwih3PaDl8OXhVwyOXg4Mt90A9E=",
- "dev": true
- },
- "lodash.sortby": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
- "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz",
+ "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==",
"dev": true
},
"merge2": {
@@ -391,25 +1332,25 @@
"dev": true
},
"micromatch": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz",
- "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==",
+ "version": "4.0.5",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
+ "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"dev": true,
"requires": {
- "braces": "^3.0.1",
- "picomatch": "^2.2.3"
+ "braces": "^3.0.2",
+ "picomatch": "^2.3.1"
}
},
"nanoid": {
- "version": "3.1.30",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.30.tgz",
- "integrity": "sha512-zJpuPDwOv8D2zq2WRoMe1HsfZthVewpel9CAvTfc/2mBD1uUT/agc5f7GHGWXlYkFvi1mVxe4IjvP2HNrop7nQ==",
+ "version": "3.3.7",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
+ "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"dev": true
},
"node-releases": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz",
- "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==",
+ "version": "2.0.14",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
+ "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==",
"dev": true
},
"normalize-path": {
@@ -425,9 +1366,9 @@
"dev": true
},
"path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz",
+ "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==",
"dev": true
},
"picocolors": {
@@ -437,71 +1378,66 @@
"dev": true
},
"picomatch": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz",
- "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==",
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true
},
"pify": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=",
+ "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==",
"dev": true
},
"postcss": {
- "version": "8.4.5",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.5.tgz",
- "integrity": "sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg==",
+ "version": "8.4.35",
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
+ "integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==",
"dev": true,
"requires": {
- "nanoid": "^3.1.30",
+ "nanoid": "^3.3.7",
"picocolors": "^1.0.0",
- "source-map-js": "^1.0.1"
+ "source-map-js": "^1.0.2"
}
},
"postcss-cli": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-9.1.0.tgz",
- "integrity": "sha512-zvDN2ADbWfza42sAnj+O2uUWyL0eRL1V+6giM2vi4SqTR3gTYy8XzcpfwccayF2szcUif0HMmXiEaDv9iEhcpw==",
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-11.0.0.tgz",
+ "integrity": "sha512-xMITAI7M0u1yolVcXJ9XTZiO9aO49mcoKQy6pCDFdMh9kGqhzLVpWxeD/32M/QBmkhcGypZFFOLNLmIW4Pg4RA==",
"dev": true,
"requires": {
"chokidar": "^3.3.0",
"dependency-graph": "^0.11.0",
- "fs-extra": "^10.0.0",
+ "fs-extra": "^11.0.0",
"get-stdin": "^9.0.0",
- "globby": "^12.0.0",
+ "globby": "^14.0.0",
"picocolors": "^1.0.0",
- "postcss-load-config": "^3.0.0",
+ "postcss-load-config": "^5.0.0",
"postcss-reporter": "^7.0.0",
"pretty-hrtime": "^1.0.3",
"read-cache": "^1.0.0",
- "slash": "^4.0.0",
+ "slash": "^5.0.0",
"yargs": "^17.0.0"
}
},
"postcss-load-config": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.0.tgz",
- "integrity": "sha512-ipM8Ds01ZUophjDTQYSVP70slFSYg3T0/zyfII5vzhN6V57YSxMgG5syXuwi5VtS8wSf3iL30v0uBdoIVx4Q0g==",
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-5.0.2.tgz",
+ "integrity": "sha512-Q8QR3FYbqOKa0bnC1UQ2bFq9/ulHX5Bi34muzitMr8aDtUelO5xKeJEYC/5smE0jNE9zdB/NBnOwXKexELbRlw==",
"dev": true,
"requires": {
- "import-cwd": "^3.0.0",
- "lilconfig": "^2.0.3",
- "yaml": "^1.10.2"
+ "lilconfig": "^3.0.0",
+ "yaml": "^2.3.4"
}
},
"postcss-reporter": {
- "version": "7.0.4",
- "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.0.4.tgz",
- "integrity": "sha512-jY/fnpGSin7kwJeunXbY35STp5O3VIxSFdjee5JkoPQ+FfGH5JW3N+Xe9oAPcL9UkjWjkK+JC72o8XH4XXKdhw==",
+ "version": "7.0.5",
+ "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.0.5.tgz",
+ "integrity": "sha512-glWg7VZBilooZGOFPhN9msJ3FQs19Hie7l5a/eE6WglzYqVeH3ong3ShFcp9kDWJT1g2Y/wd59cocf9XxBtkWA==",
"dev": true,
"requires": {
- "lodash.difference": "^4.5.0",
- "lodash.forown": "^4.4.0",
- "lodash.get": "^4.4.2",
- "lodash.groupby": "^4.6.0",
- "lodash.sortby": "^4.7.0",
- "picocolors": "^1.0.0"
+ "picocolors": "^1.0.0",
+ "thenby": "^1.3.4"
}
},
"postcss-value-parser": {
@@ -513,7 +1449,7 @@
"pretty-hrtime": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz",
- "integrity": "sha1-t+PqQkNaTJsnWdmeDyAesZWALuE=",
+ "integrity": "sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==",
"dev": true
},
"queue-microtask": {
@@ -525,7 +1461,7 @@
"read-cache": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
- "integrity": "sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=",
+ "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==",
"dev": true,
"requires": {
"pify": "^2.3.0"
@@ -543,13 +1479,7 @@
"require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
- "dev": true
- },
- "resolve-from": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
- "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
+ "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true
},
"reusify": {
@@ -568,15 +1498,15 @@
}
},
"slash": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz",
- "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==",
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
+ "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==",
"dev": true
},
"source-map-js": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.1.tgz",
- "integrity": "sha512-4+TN2b3tqOCd/kaGRJ/sTYA0tR0mdXx26ipdolxcwtJVqEnqNYvlCAt1q3ypy4QMlYus+Zh34RNtYLoq2oQ4IA==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
+ "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"dev": true
},
"string-width": {
@@ -599,6 +1529,12 @@
"ansi-regex": "^5.0.1"
}
},
+ "thenby": {
+ "version": "1.3.4",
+ "resolved": "https://registry.npmjs.org/thenby/-/thenby-1.3.4.tgz",
+ "integrity": "sha512-89Gi5raiWA3QZ4b2ePcEwswC3me9JIg+ToSgtE0JWeCynLnLxNr/f9G+xfo9K+Oj4AFdom8YNJjibIARTJmapQ==",
+ "dev": true
+ },
"to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -608,12 +1544,28 @@
"is-number": "^7.0.0"
}
},
+ "unicorn-magic": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
+ "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
+ "dev": true
+ },
"universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
"integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
"dev": true
},
+ "update-browserslist-db": {
+ "version": "1.0.13",
+ "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz",
+ "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==",
+ "dev": true,
+ "requires": {
+ "escalade": "^3.1.1",
+ "picocolors": "^1.0.0"
+ }
+ },
"wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@@ -632,15 +1584,15 @@
"dev": true
},
"yaml": {
- "version": "1.10.2",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
- "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
+ "version": "2.3.4",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
+ "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
"dev": true
},
"yargs": {
- "version": "17.3.0",
- "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.0.tgz",
- "integrity": "sha512-GQl1pWyDoGptFPJx9b9L6kmR33TGusZvXIZUT+BOz9f7X2L94oeAskFYLEg/FkhV06zZPBYLvLZRWeYId29lew==",
+ "version": "17.5.1",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz",
+ "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==",
"dev": true,
"requires": {
"cliui": "^7.0.2",
@@ -653,9 +1605,9 @@
}
},
"yargs-parser": {
- "version": "21.0.0",
- "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.0.tgz",
- "integrity": "sha512-z9kApYUOCwoeZ78rfRYYWdiU/iNL6mwwYlkkZfJoyMR1xps+NEBX5X7XmRpxkZHhXJ6+Ey00IwKxBBSW9FIjyA==",
+ "version": "21.0.1",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz",
+ "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==",
"dev": true
}
}
diff --git a/docs/package.json b/docs/package.json
index 3412e09458a..b2185ed2596 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -1,7 +1,7 @@
{
"devDependencies": {
- "postcss": "8.4.5",
- "autoprefixer": "10.4.1",
- "postcss-cli": "9.1.0"
+ "postcss": "8.4.35",
+ "autoprefixer": "10.4.17",
+ "postcss-cli": "11.0.0"
}
}
diff --git a/docs/static/images/branching_model_v2.png b/docs/static/images/branching_model_v2.png
new file mode 100644
index 00000000000..701b0c5e816
Binary files /dev/null and b/docs/static/images/branching_model_v2.png differ
diff --git a/docs/static/images/configuration_permissions.png b/docs/static/images/configuration_permissions.png
new file mode 100644
index 00000000000..6d586a66135
Binary files /dev/null and b/docs/static/images/configuration_permissions.png differ
diff --git a/docs/static/images/product-custom-fields_1.png b/docs/static/images/product-custom-fields_1.png
new file mode 100644
index 00000000000..98ffc74fb27
Binary files /dev/null and b/docs/static/images/product-custom-fields_1.png differ
diff --git a/docs/static/images/product-scm-type_1.png b/docs/static/images/product-scm-type_1.png
new file mode 100644
index 00000000000..cd60414bb7c
Binary files /dev/null and b/docs/static/images/product-scm-type_1.png differ
diff --git a/docs/static/images/questionnaires-add-from-engagement.png b/docs/static/images/questionnaires-add-from-engagement.png
new file mode 100644
index 00000000000..02e92d04e49
Binary files /dev/null and b/docs/static/images/questionnaires-add-from-engagement.png differ
diff --git a/docs/static/images/questionnaires-added.png b/docs/static/images/questionnaires-added.png
new file mode 100644
index 00000000000..d5301f13aff
Binary files /dev/null and b/docs/static/images/questionnaires-added.png differ
diff --git a/docs/static/images/questionnaires-assign-user.png b/docs/static/images/questionnaires-assign-user.png
new file mode 100644
index 00000000000..8d1c9f1ea66
Binary files /dev/null and b/docs/static/images/questionnaires-assign-user.png differ
diff --git a/docs/static/images/questionnaires-create-engagement.png b/docs/static/images/questionnaires-create-engagement.png
new file mode 100644
index 00000000000..d8c92b55697
Binary files /dev/null and b/docs/static/images/questionnaires-create-engagement.png differ
diff --git a/docs/static/images/questionnaires-create-new.png b/docs/static/images/questionnaires-create-new.png
new file mode 100644
index 00000000000..8b788ec2294
Binary files /dev/null and b/docs/static/images/questionnaires-create-new.png differ
diff --git a/docs/static/images/questionnaires-main-view.png b/docs/static/images/questionnaires-main-view.png
new file mode 100644
index 00000000000..1e2a5715bf0
Binary files /dev/null and b/docs/static/images/questionnaires-main-view.png differ
diff --git a/docs/static/images/questionnaires-multiple-choice.png b/docs/static/images/questionnaires-multiple-choice.png
new file mode 100644
index 00000000000..49f8d25c690
Binary files /dev/null and b/docs/static/images/questionnaires-multiple-choice.png differ
diff --git a/docs/static/images/questionnaires-new-engagement.png b/docs/static/images/questionnaires-new-engagement.png
new file mode 100644
index 00000000000..da61ab6d1c2
Binary files /dev/null and b/docs/static/images/questionnaires-new-engagement.png differ
diff --git a/docs/static/images/questionnaires-open-ended.png b/docs/static/images/questionnaires-open-ended.png
new file mode 100644
index 00000000000..71ef0836aec
Binary files /dev/null and b/docs/static/images/questionnaires-open-ended.png differ
diff --git a/docs/static/images/questionnaires-publicize.png b/docs/static/images/questionnaires-publicize.png
new file mode 100644
index 00000000000..b450459851c
Binary files /dev/null and b/docs/static/images/questionnaires-publicize.png differ
diff --git a/docs/static/images/questionnaires-question-search.png b/docs/static/images/questionnaires-question-search.png
new file mode 100644
index 00000000000..70496c81e0d
Binary files /dev/null and b/docs/static/images/questionnaires-question-search.png differ
diff --git a/docs/static/images/questionnaires-questions.png b/docs/static/images/questionnaires-questions.png
new file mode 100644
index 00000000000..f576ed331dc
Binary files /dev/null and b/docs/static/images/questionnaires-questions.png differ
diff --git a/docs/static/images/questionnaires-respond.png b/docs/static/images/questionnaires-respond.png
new file mode 100644
index 00000000000..f25968f7b24
Binary files /dev/null and b/docs/static/images/questionnaires-respond.png differ
diff --git a/docs/static/images/questionnaires-search.png b/docs/static/images/questionnaires-search.png
new file mode 100644
index 00000000000..05bc6508fc4
Binary files /dev/null and b/docs/static/images/questionnaires-search.png differ
diff --git a/docs/static/images/questionnaires-select-questions.png b/docs/static/images/questionnaires-select-questions.png
new file mode 100644
index 00000000000..14462c8970c
Binary files /dev/null and b/docs/static/images/questionnaires-select-questions.png differ
diff --git a/docs/static/images/questionnaires-select-survey.png b/docs/static/images/questionnaires-select-survey.png
new file mode 100644
index 00000000000..1931b4cc513
Binary files /dev/null and b/docs/static/images/questionnaires-select-survey.png differ
diff --git a/docs/static/images/questionnaires-share.png b/docs/static/images/questionnaires-share.png
new file mode 100644
index 00000000000..1ce972d1e91
Binary files /dev/null and b/docs/static/images/questionnaires-share.png differ
diff --git a/docs/static/images/questionnaires-sidebar.png b/docs/static/images/questionnaires-sidebar.png
new file mode 100644
index 00000000000..51846decaa3
Binary files /dev/null and b/docs/static/images/questionnaires-sidebar.png differ
diff --git a/docs/static/images/questionnaires-system-settings.png b/docs/static/images/questionnaires-system-settings.png
new file mode 100644
index 00000000000..61ddfa640a0
Binary files /dev/null and b/docs/static/images/questionnaires-system-settings.png differ
diff --git a/docs/static/images/questionnaires-unassigned.png b/docs/static/images/questionnaires-unassigned.png
new file mode 100644
index 00000000000..530ba65f675
Binary files /dev/null and b/docs/static/images/questionnaires-unassigned.png differ
diff --git a/docs/static/images/questionnaires-view-questionnaire.png b/docs/static/images/questionnaires-view-questionnaire.png
new file mode 100644
index 00000000000..b6ef757d4cb
Binary files /dev/null and b/docs/static/images/questionnaires-view-questionnaire.png differ
diff --git a/docs/static/images/questionnaires-view-responses.png b/docs/static/images/questionnaires-view-responses.png
new file mode 100644
index 00000000000..562910c0589
Binary files /dev/null and b/docs/static/images/questionnaires-view-responses.png differ
diff --git a/docs/static/images/slack_add_product.png b/docs/static/images/slack_add_product.png
new file mode 100644
index 00000000000..81a6dad2e2f
Binary files /dev/null and b/docs/static/images/slack_add_product.png differ
diff --git a/docs/static/images/slack_import_scan.png b/docs/static/images/slack_import_scan.png
new file mode 100644
index 00000000000..fe154d2fb5b
Binary files /dev/null and b/docs/static/images/slack_import_scan.png differ
diff --git a/docs/static/images/source-code-repositories-bitbucket-onpremise_1.png b/docs/static/images/source-code-repositories-bitbucket-onpremise_1.png
new file mode 100644
index 00000000000..6b333abbd36
Binary files /dev/null and b/docs/static/images/source-code-repositories-bitbucket-onpremise_1.png differ
diff --git a/docs/static/images/source-code-repositories-bitbucket_1.png b/docs/static/images/source-code-repositories-bitbucket_1.png
new file mode 100644
index 00000000000..d3e85d6b19b
Binary files /dev/null and b/docs/static/images/source-code-repositories-bitbucket_1.png differ
diff --git a/docs/static/images/source-code-repositories-gitlab_1.png b/docs/static/images/source-code-repositories-gitlab_1.png
new file mode 100644
index 00000000000..2aa43b63d1e
Binary files /dev/null and b/docs/static/images/source-code-repositories-gitlab_1.png differ
diff --git a/docs/static/images/tags-bulk-edit-complete.png b/docs/static/images/tags-bulk-edit-complete.png
new file mode 100644
index 00000000000..9ca91e2b294
Binary files /dev/null and b/docs/static/images/tags-bulk-edit-complete.png differ
diff --git a/docs/static/images/tags-bulk-edit-submit.png b/docs/static/images/tags-bulk-edit-submit.png
new file mode 100644
index 00000000000..7e5c0d86c28
Binary files /dev/null and b/docs/static/images/tags-bulk-edit-submit.png differ
diff --git a/docs/static/images/tags-finding-filter-snippet.png b/docs/static/images/tags-finding-filter-snippet.png
new file mode 100644
index 00000000000..af8986c367b
Binary files /dev/null and b/docs/static/images/tags-finding-filter-snippet.png differ
diff --git a/docs/static/images/tags-high-level-example.png b/docs/static/images/tags-high-level-example.png
new file mode 100644
index 00000000000..b85ba163d3b
Binary files /dev/null and b/docs/static/images/tags-high-level-example.png differ
diff --git a/docs/static/images/tags-inherit-exmaple.png b/docs/static/images/tags-inherit-exmaple.png
new file mode 100644
index 00000000000..e4b80605ee2
Binary files /dev/null and b/docs/static/images/tags-inherit-exmaple.png differ
diff --git a/docs/static/images/tags-management-on-object.png b/docs/static/images/tags-management-on-object.png
new file mode 100644
index 00000000000..79bd0527534
Binary files /dev/null and b/docs/static/images/tags-management-on-object.png differ
diff --git a/docs/static/images/tags-select-findings-for-bulk-edit.png b/docs/static/images/tags-select-findings-for-bulk-edit.png
new file mode 100644
index 00000000000..8e6fc47dc98
Binary files /dev/null and b/docs/static/images/tags-select-findings-for-bulk-edit.png differ
diff --git a/dojo/__init__.py b/dojo/__init__.py
index 51ea8e4cee6..f1c39c15ed1 100644
--- a/dojo/__init__.py
+++ b/dojo/__init__.py
@@ -4,8 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
-default_app_config = 'dojo.apps.DojoAppConfig'
-
-__version__ = '2.7.0-dev'
+__version__ = '2.32.0-dev'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
-__docs__ = 'https://defectdojo.github.io/django-DefectDojo'
+__docs__ = 'https://documentation.defectdojo.com'
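
The dropped `default_app_config` line tracks Django 3.2+, which auto-discovers a single `AppConfig` subclass declared in the app's `apps.py`, so the explicit setting became redundant. A minimal sketch of that pattern, assuming `DojoAppConfig` lives in `dojo/apps.py` as the old setting implied (the body shown is illustrative, not the project's actual class):

```python
# Sketch of the auto-discovered AppConfig pattern (assumption: the real
# dojo/apps.py does more in ready() than shown here).
from django.apps import AppConfig


class DojoAppConfig(AppConfig):
    name = "dojo"

    def ready(self):
        # Signal receivers are commonly imported here so they register
        # once the app registry is fully loaded.
        pass
```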
diff --git a/dojo/rules/__init__.py b/dojo/announcement/__init__.py
similarity index 100%
rename from dojo/rules/__init__.py
rename to dojo/announcement/__init__.py
diff --git a/dojo/announcement/signals.py b/dojo/announcement/signals.py
new file mode 100644
index 00000000000..5b89d71d780
--- /dev/null
+++ b/dojo/announcement/signals.py
@@ -0,0 +1,37 @@
+from dojo.models import Announcement, UserAnnouncement, Dojo_User
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+from django.conf import settings
+
+
+@receiver(post_save, sender=Dojo_User)
+def add_announcement_to_new_user(sender, instance, **kwargs):
+ announcements = Announcement.objects.all()
+ if announcements.count() > 0:
+ dojo_user = Dojo_User.objects.get(id=instance.id)
+ announcement = announcements.first()
+ cloud_announcement = (
+ "Cloud and On-Premise Subscriptions Now Available!"
+ in announcement.message
+ )
+ if not cloud_announcement or settings.CREATE_CLOUD_BANNER:
+ user_announcements = UserAnnouncement.objects.filter(
+ user=dojo_user, announcement=announcement
+ )
+ if user_announcements.count() == 0:
+ UserAnnouncement.objects.get_or_create(
+ user=dojo_user, announcement=announcement
+ )
+
+
+@receiver(post_save, sender=Announcement)
+def announcement_post_save(sender, instance, created, **kwargs):
+ if created:
+ UserAnnouncement.objects.bulk_create(
+ [
+ UserAnnouncement(
+ user=user_id, announcement=instance
+ )
+ for user_id in Dojo_User.objects.all()
+ ]
+ )
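
The two receivers above keep `UserAnnouncement` rows in sync from both directions: a freshly saved user picks up the current announcement (unless it is the cloud banner and `CREATE_CLOUD_BANNER` is off), and a freshly created announcement fans out to every existing user. A hedged sketch of exercising the fan-out receiver in a Django test (field values and the bare `create()` calls are illustrative):

```python
# Hypothetical test for the announcement fan-out signal above.
from django.test import TestCase

from dojo.models import Announcement, Dojo_User, UserAnnouncement


class AnnouncementFanOutTest(TestCase):
    def test_new_announcement_reaches_existing_users(self):
        user = Dojo_User.objects.create(username="alice")
        announcement = Announcement.objects.create(message="Scheduled maintenance")
        # announcement_post_save bulk-creates one UserAnnouncement per user.
        self.assertTrue(
            UserAnnouncement.objects.filter(
                user=user, announcement=announcement
            ).exists()
        )
```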
diff --git a/dojo/announcement/urls.py b/dojo/announcement/urls.py
new file mode 100644
index 00000000000..c62d26f13d5
--- /dev/null
+++ b/dojo/announcement/urls.py
@@ -0,0 +1,15 @@
+from django.urls import re_path
+from dojo.announcement import views
+
+urlpatterns = [
+ re_path(
+ r"^configure_announcement$",
+ views.configure_announcement,
+ name="configure_announcement",
+ ),
+ re_path(
+ r"^dismiss_announcement$",
+ views.dismiss_announcement,
+ name="dismiss_announcement",
+ ),
+]
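
These named routes presumably get mounted in the root URLconf alongside the other dojo apps; a sketch of the wiring, assuming an `include()` with no extra prefix (both the include path and the prefix are assumptions):

```python
# Hypothetical root-URLconf wiring for the announcement routes.
from django.urls import include, re_path

urlpatterns = [
    re_path(r"^", include("dojo.announcement.urls")),
]

# With that in place the route names resolve as usual, e.g.:
#   reverse("configure_announcement")  ->  "/configure_announcement"
#   reverse("dismiss_announcement")    ->  "/dismiss_announcement"
```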
diff --git a/dojo/announcement/views.py b/dojo/announcement/views.py
new file mode 100644
index 00000000000..2544dba3539
--- /dev/null
+++ b/dojo/announcement/views.py
@@ -0,0 +1,92 @@
+import logging
+
+from django.shortcuts import render
+from django.contrib import messages
+from django.urls import reverse
+from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext
+from django.http import HttpResponseRedirect
+from dojo.utils import add_breadcrumb
+
+from dojo.forms import AnnouncementCreateForm, AnnouncementRemoveForm
+from dojo.models import Announcement, UserAnnouncement
+from dojo.authorization.authorization_decorators import (
+ user_is_configuration_authorized,
+)
+
+logger = logging.getLogger(__name__)
+
+
+@user_is_configuration_authorized("dojo.change_announcement")
+def configure_announcement(request):
+ remove = False
+ if request.method == "GET":
+ try:
+ announcement = Announcement.objects.get(id=1)
+ form = AnnouncementRemoveForm(
+ initial={
+ "message": announcement.message,
+ "style": announcement.style,
+ "dismissable": announcement.dismissable,
+ }
+ )
+ remove = True
+ except Announcement.DoesNotExist:
+ form = AnnouncementCreateForm()
+ elif request.method == "POST":
+ if "_Remove" in request.POST:
+ Announcement.objects.all().delete()
+ messages.add_message(
+ request,
+ messages.SUCCESS,
+ _("Announcement removed for everyone."),
+ extra_tags="alert-success",
+ )
+ return HttpResponseRedirect("dashboard")
+ form = AnnouncementCreateForm(request.POST)
+ announcement, created = Announcement.objects.get_or_create(id=1)
+ if form.is_valid() and created:
+ announcement.message = form.cleaned_data["message"]
+ announcement.style = form.cleaned_data["style"]
+ announcement.dismissable = form.cleaned_data["dismissable"]
+ announcement.save()
+ messages.add_message(
+ request,
+ messages.SUCCESS,
+ _("Announcement updated successfully."),
+ extra_tags="alert-success",
+ )
+ return HttpResponseRedirect(reverse("configure_announcement"))
+
+ add_breadcrumb(
+ title=gettext("Announcement Configuration"),
+ top_level=True,
+ request=request,
+ )
+ return render(
+ request, "dojo/announcement.html", {"form": form, "remove": remove}
+ )
+
+
+def dismiss_announcement(request):
+ if request.method == "POST":
+ deleted_count, objects_deleted = UserAnnouncement.objects.filter(
+ user=request.user, announcement=1
+ ).delete()
+ if deleted_count > 0:
+ messages.add_message(
+ request,
+ messages.SUCCESS,
+ _("Announcement removed."),
+ extra_tags="alert-success",
+ )
+ return HttpResponseRedirect("dashboard")
+ else:
+ messages.add_message(
+ request,
+ messages.ERROR,
+ _("Failed to remove announcement."),
+ extra_tags="alert-danger",
+ )
+ return render(request, "dojo/dismiss_announcement.html")
+ return render(request, "dojo/dismiss_announcement.html")
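
`configure_announcement` treats the announcement as a singleton via `get_or_create(id=1)`, and the `form.is_valid() and created` guard means a POST only persists the submitted fields when that row was just created. A sketch of driving the endpoint with Django's test client, assuming a superuser passes the `dojo.change_announcement` configuration check (credentials and form values are illustrative):

```python
# Hypothetical test-client walkthrough of configure_announcement.
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse

from dojo.models import Announcement


class ConfigureAnnouncementTest(TestCase):
    def test_post_creates_the_singleton_announcement(self):
        admin = get_user_model().objects.create_superuser(
            "admin", "admin@example.com", "password"
        )
        self.client.force_login(admin)
        self.client.post(
            reverse("configure_announcement"),
            {"message": "Maintenance window", "style": "info", "dismissable": True},
        )
        # get_or_create(id=1) pins everything to a single row.
        self.assertEqual(
            Announcement.objects.get(id=1).message, "Maintenance window"
        )
```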
diff --git a/dojo/api_v2/exception_handler.py b/dojo/api_v2/exception_handler.py
index 46131c6fa53..5aa677bd10e 100644
--- a/dojo/api_v2/exception_handler.py
+++ b/dojo/api_v2/exception_handler.py
@@ -1,7 +1,11 @@
from django.core.exceptions import ValidationError
from django.db.models.deletion import RestrictedError
from rest_framework.response import Response
-from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_409_CONFLICT, HTTP_500_INTERNAL_SERVER_ERROR
+from rest_framework.status import (
+ HTTP_400_BAD_REQUEST,
+ HTTP_409_CONFLICT,
+ HTTP_500_INTERNAL_SERVER_ERROR,
+)
from rest_framework.views import exception_handler
import logging
@@ -18,12 +22,12 @@ def custom_exception_handler(exc, context):
response = Response()
response.status_code = HTTP_409_CONFLICT
response.data = {}
- response.data['message'] = str(exc)
+ response.data["message"] = str(exc)
elif isinstance(exc, ValidationError):
response = Response()
response.status_code = HTTP_400_BAD_REQUEST
response.data = {}
- response.data['message'] = str(exc)
+ response.data["message"] = str(exc)
else:
if response is None:
# There is no standard error response, so we assume an unexpected
@@ -33,15 +37,19 @@ def custom_exception_handler(exc, context):
response = Response()
response.status_code = HTTP_500_INTERNAL_SERVER_ERROR
response.data = {}
- response.data['message'] = 'Internal server error, check logs for details'
+ response.data[
+ "message"
+ ] = "Internal server error, check logs for details"
else:
if response.status_code < 500:
                # HTTP status codes lower than 500 are not technical errors.
                # They need not be logged, and we provide the exception
# message, if it is different from the detail that is already
# in the response.
- if isinstance(response.data, dict) and str(exc) != response.data.get('detail', ''):
- response.data['message'] = str(exc)
+ if isinstance(response.data, dict) and str(
+ exc
+ ) != response.data.get("detail", ""):
+ response.data["message"] = str(exc)
else:
# HTTP status code 500 or higher are technical errors.
# They get logged and we don't change the response.
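
The handler only takes effect once DRF is pointed at it through the `EXCEPTION_HANDLER` setting; a sketch of the hookup (the surrounding `REST_FRAMEWORK` dict normally carries many more keys in `settings.py`):

```python
# settings.py sketch: register the custom exception handler with DRF.
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "dojo.api_v2.exception_handler.custom_exception_handler",
}
```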
diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py
new file mode 100644
index 00000000000..e0770971f3f
--- /dev/null
+++ b/dojo/api_v2/mixins.py
@@ -0,0 +1,62 @@
+from django.db import DEFAULT_DB_ALIAS
+from django.contrib.admin.utils import NestedObjects
+from drf_spectacular.utils import extend_schema
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework.decorators import action
+from rest_framework import status
+from rest_framework.authtoken.models import Token
+from dojo.api_v2 import serializers
+from dojo.models import Question, Answer
+import itertools
+
+
+class DeletePreviewModelMixin:
+ @extend_schema(
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.DeletePreviewSerializer(many=True)
+ },
+ )
+ @swagger_auto_schema(
+ method="get",
+ responses={"default": serializers.DeletePreviewSerializer(many=True)},
+ )
+ @action(detail=True, methods=["get"], filter_backends=[], suffix="List")
+ def delete_preview(self, request, pk=None):
+ object = self.get_object()
+
+ collector = NestedObjects(using=DEFAULT_DB_ALIAS)
+ collector.collect([object])
+ rels = collector.nested()
+
+ def flatten(elem):
+ if isinstance(elem, list):
+ return itertools.chain.from_iterable(map(flatten, elem))
+ else:
+ return [elem]
+
+ rels = [
+ {
+ "model": type(x).__name__,
+ "id": x.id if hasattr(x, "id") else None,
+ "name": str(x)
+ if not isinstance(x, Token)
+ else "",
+ }
+ for x in flatten(rels)
+ ]
+
+ page = self.paginate_queryset(rels)
+
+ serializer = serializers.DeletePreviewSerializer(page, many=True)
+ return self.get_paginated_response(serializer.data)
+
+
+class QuestionSubClassFieldsMixin(object):
+ def get_queryset(self):
+ return Question.objects.select_subclasses()
+
+
+class AnswerSubClassFieldsMixin(object):
+ def get_queryset(self):
+ return Answer.objects.select_subclasses()
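
`delete_preview` asks `NestedObjects` for the full deletion graph and then flattens it into one row per affected object; the recursive `flatten` helper does the heavy lifting. A standalone illustration of that flattening step (the sample data is made up):

```python
# Standalone illustration of the flatten() helper used by delete_preview.
import itertools


def flatten(elem):
    # NestedObjects.nested() interleaves objects with lists of their
    # dependents; this recursion yields them depth-first as a flat iterable.
    if isinstance(elem, list):
        return itertools.chain.from_iterable(map(flatten, elem))
    return [elem]


nested = ["product", ["engagement", ["test", "finding"], "endpoint"]]
print(list(flatten(nested)))
# -> ['product', 'engagement', 'test', 'finding', 'endpoint']
```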
diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py
index 8ccb6e39a4b..aac0eb79264 100644
--- a/dojo/api_v2/permissions.py
+++ b/dojo/api_v2/permissions.py
@@ -1,35 +1,70 @@
import re
-from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
-from django.conf import settings
-from dojo.api_v2.serializers import get_import_meta_data_from_dict, get_product_id_from_dict
-from dojo.importers.reimporter.utils import get_target_engagement_if_exists, get_target_product_by_id_if_exists, \
- get_target_product_if_exists, get_target_test_if_exists, \
- get_target_product_type_if_exists
-from dojo.models import Endpoint, Engagement, Finding, Finding_Group, Product_Type, Product, Test, Dojo_Group
+from rest_framework.exceptions import (
+ ParseError,
+ PermissionDenied,
+ ValidationError,
+)
+from dojo.api_v2.serializers import (
+ get_import_meta_data_from_dict,
+ get_product_id_from_dict,
+)
+from dojo.importers.reimporter.utils import (
+ get_target_engagement_if_exists,
+ get_target_product_by_id_if_exists,
+ get_target_product_if_exists,
+ get_target_test_if_exists,
+ get_target_product_type_if_exists,
+)
+from dojo.models import (
+ Endpoint,
+ Engagement,
+ Finding,
+ Finding_Group,
+ Product_Type,
+ Product,
+ Test,
+ Dojo_Group,
+ Cred_Mapping,
+)
from django.shortcuts import get_object_or_404
from rest_framework import permissions, serializers
-from dojo.authorization.authorization import user_has_global_permission, user_has_permission, user_has_configuration_permission
+from dojo.authorization.authorization import (
+ user_has_global_permission,
+ user_has_permission,
+ user_has_configuration_permission,
+)
from dojo.authorization.roles_permissions import Permissions
def check_post_permission(request, post_model, post_pk, post_permission):
- if request.method == 'POST':
+ if request.method == "POST":
if request.data.get(post_pk) is None:
- raise ParseError('Unable to check for permissions: Attribute \'{}\' is required'.format(post_pk))
+ raise ParseError(
+ "Unable to check for permissions: Attribute '{}' is required".format(
+ post_pk
+ )
+ )
object = get_object_or_404(post_model, pk=request.data.get(post_pk))
return user_has_permission(request.user, object, post_permission)
else:
return True
-def check_object_permission(request, object, get_permission, put_permission, delete_permission, post_permission=None):
- if request.method == 'GET':
+def check_object_permission(
+ request,
+ object,
+ get_permission,
+ put_permission,
+ delete_permission,
+ post_permission=None,
+):
+ if request.method == "GET":
return user_has_permission(request.user, object, get_permission)
- elif request.method == 'PUT' or request.method == 'PATCH':
+ elif request.method == "PUT" or request.method == "PATCH":
return user_has_permission(request.user, object, put_permission)
- elif request.method == 'DELETE':
+ elif request.method == "DELETE":
return user_has_permission(request.user, object, delete_permission)
- elif request.method == 'POST':
+ elif request.method == "POST":
return user_has_permission(request.user, object, post_permission)
else:
return False
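
`check_object_permission` is essentially a dispatcher from HTTP verb to the permission that verb requires: safe reads map to the view permission, mutations to edit or delete, and POST only passes when the caller supplied a `post_permission`. A condensed sketch of that mapping (the table values are placeholders for the `Permissions` arguments):

```python
# Condensed sketch of the verb -> permission dispatch above.
from typing import Optional

VERB_TO_PERMISSION = {
    "GET": "get_permission",
    "PUT": "put_permission",
    "PATCH": "put_permission",
    "DELETE": "delete_permission",
    "POST": "post_permission",  # may be None when the caller omits it
}


def required_permission(method: str) -> Optional[str]:
    # Unknown verbs fall through to None, mirroring the final `return False`.
    return VERB_TO_PERMISSION.get(method)
```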
@@ -37,57 +72,132 @@ def check_object_permission(request, object, get_permission, put_permission, del
class UserHasAppAnalysisPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Technology_Add)
+ return check_post_permission(
+ request, Product, "product", Permissions.Technology_Add
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj.product, Permissions.Technology_View, Permissions.Technology_Edit, Permissions.Technology_Delete)
+ return check_object_permission(
+ request,
+ obj.product,
+ Permissions.Technology_View,
+ Permissions.Technology_Edit,
+ Permissions.Technology_Delete,
+ )
+
+
+class UserHasCredentialPermission(permissions.BasePermission):
+ def has_permission(self, request, view):
+ if request.data.get("product") is not None:
+ return check_post_permission(
+ request, Cred_Mapping, "product", Permissions.Credential_Add
+ )
+ if request.data.get("engagement") is not None:
+ return check_post_permission(
+ request, Cred_Mapping, "engagement", Permissions.Credential_Add
+ )
+ if request.data.get("test") is not None:
+ return check_post_permission(
+ request, Cred_Mapping, "test", Permissions.Credential_Add
+ )
+ if request.data.get("finding") is not None:
+ return check_post_permission(
+ request, Cred_Mapping, "finding", Permissions.Credential_Add
+ )
+ return check_post_permission(
+ request, Cred_Mapping, "product", Permissions.Credential_Add
+ )
+
+ def has_object_permission(self, request, view, obj):
+ return check_object_permission(
+ request,
+ obj.product,
+ Permissions.Credential_View,
+ Permissions.Credential_Edit,
+ Permissions.Credential_Delete,
+ )
class UserHasDojoGroupPermission(permissions.BasePermission):
def has_permission(self, request, view):
- if request.method == 'GET':
- return user_has_configuration_permission(request.user, 'auth.view_group', 'staff')
- elif request.method == 'POST':
- return user_has_configuration_permission(request.user, 'auth.create_group', 'staff')
+ if request.method == "GET":
+ return user_has_configuration_permission(
+ request.user, "auth.view_group"
+ )
+ elif request.method == "POST":
+ return user_has_configuration_permission(
+ request.user, "auth.add_group"
+ )
else:
return True
def has_object_permission(self, request, view, obj):
- if request.method == 'GET':
+ if request.method == "GET":
# Users need to be authorized to view groups in general and only the groups they are a member of
- # because with the group they can see user information that might be considered as confidential
- return user_has_configuration_permission(request.user, 'auth.view_group', 'staff') and user_has_permission(request.user, obj, Permissions.Group_View)
+ # because with the group they can see user information that might
+            # be considered confidential
+ return user_has_configuration_permission(
+ request.user, "auth.view_group"
+ ) and user_has_permission(
+ request.user, obj, Permissions.Group_View
+ )
else:
- return check_object_permission(request, obj, Permissions.Group_View, Permissions.Group_Edit, Permissions.Group_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Group_View,
+ Permissions.Group_Edit,
+ Permissions.Group_Delete,
+ )
class UserHasDojoGroupMemberPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Dojo_Group, 'group', Permissions.Group_Manage_Members)
+ return check_post_permission(
+ request, Dojo_Group, "group", Permissions.Group_Manage_Members
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Group_View, Permissions.Group_Manage_Members, Permissions.Group_Member_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Group_View,
+ Permissions.Group_Manage_Members,
+ Permissions.Group_Member_Delete,
+ )
class UserHasDojoMetaPermission(permissions.BasePermission):
def has_permission(self, request, view):
- if request.method == 'POST':
+ if request.method == "POST":
has_permission_result = True
- product_id = request.data.get('product', None)
+ product_id = request.data.get("product", None)
if product_id:
object = get_object_or_404(Product, pk=product_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Product_Edit)
- finding_id = request.data.get('finding', None)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Product_Edit
+ )
+ )
+ finding_id = request.data.get("finding", None)
if finding_id:
object = get_object_or_404(Finding, pk=finding_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Finding_Edit)
- endpoint_id = request.data.get('endpoint', None)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Finding_Edit
+ )
+ )
+ endpoint_id = request.data.get("endpoint", None)
if endpoint_id:
object = get_object_or_404(Endpoint, pk=endpoint_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Endpoint_Edit)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Endpoint_Edit
+ )
+ )
return has_permission_result
else:
return True
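
The reformatted checks in `UserHasDojoMetaPermission` repeat one idiom: AND together a permission check for each related id that happens to be present in the payload. An equivalent compact sketch of that accumulate-AND pattern (this is a paraphrase, not the project's code):

```python
# Equivalent sketch of the accumulate-AND pattern: each related object is
# checked only if present, and every present check must pass.
def all_present_checks_pass(checks):
    # `checks` is an iterable of (obj, predicate) pairs; None objects are
    # skipped, mirroring the `if product_id:` style guards above.
    return all(predicate(obj) for obj, predicate in checks if obj is not None)
```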
@@ -96,91 +206,222 @@ def has_object_permission(self, request, view, obj):
has_permission_result = True
product = obj.product
if product:
- has_permission_result = has_permission_result and \
- check_object_permission(request, product, Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ product,
+ Permissions.Product_View,
+ Permissions.Product_Edit,
+ Permissions.Product_Edit,
+ )
+ )
finding = obj.finding
if finding:
- has_permission_result = has_permission_result and \
- check_object_permission(request, finding, Permissions.Finding_View, Permissions.Finding_Edit, Permissions.Finding_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ finding,
+ Permissions.Finding_View,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Edit,
+ )
+ )
endpoint = obj.endpoint
if endpoint:
- has_permission_result = has_permission_result and \
- check_object_permission(request, endpoint, Permissions.Endpoint_View, Permissions.Endpoint_Edit, Permissions.Endpoint_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ endpoint,
+ Permissions.Endpoint_View,
+ Permissions.Endpoint_Edit,
+ Permissions.Endpoint_Edit,
+ )
+ )
return has_permission_result
class UserHasToolProductSettingsPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Product_Edit)
+ return check_post_permission(
+ request, Product, "product", Permissions.Product_Edit
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj.product, Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Edit)
+ return check_object_permission(
+ request,
+ obj.product,
+ Permissions.Product_View,
+ Permissions.Product_Edit,
+ Permissions.Product_Edit,
+ )
class UserHasEndpointPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Endpoint_Add)
+ return check_post_permission(
+ request, Product, "product", Permissions.Endpoint_Add
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Endpoint_View, Permissions.Endpoint_Edit, Permissions.Endpoint_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Endpoint_View,
+ Permissions.Endpoint_Edit,
+ Permissions.Endpoint_Delete,
+ )
class UserHasEndpointStatusPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Endpoint, 'endpoint', Permissions.Endpoint_Edit)
+ return check_post_permission(
+ request, Endpoint, "endpoint", Permissions.Endpoint_Edit
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj.endpoint, Permissions.Endpoint_View, Permissions.Endpoint_Edit, Permissions.Endpoint_Edit)
+ return check_object_permission(
+ request,
+ obj.endpoint,
+ Permissions.Endpoint_View,
+ Permissions.Endpoint_Edit,
+ Permissions.Endpoint_Edit,
+ )
class UserHasEngagementPermission(permissions.BasePermission):
# Permission checks for related objects (like notes or metadata) can be moved
    # into a separate class once the legacy authorization is removed.
- path_engagement_post = re.compile(r'^/api/v2/engagements/$')
- path_engagement = re.compile(r'^/api/v2/engagements/\d+/$')
+ path_engagement_post = re.compile(r"^/api/v2/engagements/$")
+ path_engagement = re.compile(r"^/api/v2/engagements/\d+/$")
def has_permission(self, request, view):
- if UserHasEngagementPermission.path_engagement_post.match(request.path) or \
- UserHasEngagementPermission.path_engagement.match(request.path):
- return check_post_permission(request, Product, 'product', Permissions.Engagement_Add)
+ if UserHasEngagementPermission.path_engagement_post.match(
+ request.path
+ ) or UserHasEngagementPermission.path_engagement.match(request.path):
+ return check_post_permission(
+ request, Product, "product", Permissions.Engagement_Add
+ )
else:
            # related objects only need object permission
return True
def has_object_permission(self, request, view, obj):
- if UserHasEngagementPermission.path_engagement_post.match(request.path) or \
- UserHasEngagementPermission.path_engagement.match(request.path):
- return check_object_permission(request, obj, Permissions.Engagement_View, Permissions.Engagement_Edit, Permissions.Engagement_Delete)
+ if UserHasEngagementPermission.path_engagement_post.match(
+ request.path
+ ) or UserHasEngagementPermission.path_engagement.match(request.path):
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Engagement_View,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Delete,
+ )
else:
- return check_object_permission(request, obj, Permissions.Engagement_View, Permissions.Engagement_Edit, Permissions.Engagement_Edit, Permissions.Engagement_Edit)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Engagement_View,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Edit,
+ )
+
+
+class UserHasRiskAcceptancePermission(permissions.BasePermission):
+ # Permission checks for related objects (like notes or metadata) can be moved
+    # into a separate class once the legacy authorization is removed.
+ path_risk_acceptance_post = re.compile(r"^/api/v2/risk_acceptances/$")
+ path_risk_acceptance = re.compile(r"^/api/v2/risk_acceptances/\d+/$")
+
+ def has_permission(self, request, view):
+ if UserHasRiskAcceptancePermission.path_risk_acceptance_post.match(
+ request.path
+ ) or UserHasRiskAcceptancePermission.path_risk_acceptance.match(
+ request.path
+ ):
+ return check_post_permission(
+ request, Product, "product", Permissions.Risk_Acceptance
+ )
+ else:
+            # related objects only need object permission
+ return True
+
+ def has_object_permission(self, request, view, obj):
+ if UserHasRiskAcceptancePermission.path_risk_acceptance_post.match(
+ request.path
+ ) or UserHasRiskAcceptancePermission.path_risk_acceptance.match(
+ request.path
+ ):
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Risk_Acceptance,
+ Permissions.Risk_Acceptance,
+ Permissions.Risk_Acceptance,
+ )
+ else:
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Risk_Acceptance,
+ Permissions.Risk_Acceptance,
+ Permissions.Risk_Acceptance,
+ Permissions.Risk_Acceptance,
+ )
class UserHasFindingPermission(permissions.BasePermission):
# Permission checks for related objects (like notes or metadata) can be moved
    # into a separate class once the legacy authorization is removed.
- path_finding_post = re.compile(r'^/api/v2/findings/$')
- path_finding = re.compile(r'^/api/v2/findings/\d+/$')
- path_stub_finding_post = re.compile(r'^/api/v2/stub_findings/$')
- path_stub_finding = re.compile(r'^/api/v2/stub_findings/\d+/$')
+ path_finding_post = re.compile(r"^/api/v2/findings/$")
+ path_finding = re.compile(r"^/api/v2/findings/\d+/$")
+ path_stub_finding_post = re.compile(r"^/api/v2/stub_findings/$")
+ path_stub_finding = re.compile(r"^/api/v2/stub_findings/\d+/$")
def has_permission(self, request, view):
- if UserHasFindingPermission.path_finding_post.match(request.path) or \
- UserHasFindingPermission.path_finding.match(request.path) or \
- UserHasFindingPermission.path_stub_finding_post.match(request.path) or \
- UserHasFindingPermission.path_stub_finding.match(request.path):
- return check_post_permission(request, Test, 'test', Permissions.Finding_Add)
+ if (
+ UserHasFindingPermission.path_finding_post.match(request.path)
+ or UserHasFindingPermission.path_finding.match(request.path)
+ or UserHasFindingPermission.path_stub_finding_post.match(
+ request.path
+ )
+ or UserHasFindingPermission.path_stub_finding.match(request.path)
+ ):
+ return check_post_permission(
+ request, Test, "test", Permissions.Finding_Add
+ )
else:
            # related objects only need object permission
return True
def has_object_permission(self, request, view, obj):
- if UserHasFindingPermission.path_finding_post.match(request.path) or \
- UserHasFindingPermission.path_finding.match(request.path) or \
- UserHasFindingPermission.path_stub_finding_post.match(request.path) or \
- UserHasFindingPermission.path_stub_finding.match(request.path):
- return check_object_permission(request, obj, Permissions.Finding_View, Permissions.Finding_Edit, Permissions.Finding_Delete)
+ if (
+ UserHasFindingPermission.path_finding_post.match(request.path)
+ or UserHasFindingPermission.path_finding.match(request.path)
+ or UserHasFindingPermission.path_stub_finding_post.match(
+ request.path
+ )
+ or UserHasFindingPermission.path_stub_finding.match(request.path)
+ ):
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Finding_View,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Delete,
+ )
else:
- return check_object_permission(request, obj, Permissions.Finding_View, Permissions.Finding_Edit, Permissions.Finding_Edit, Permissions.Finding_Edit)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Finding_View,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Edit,
+ )
class UserHasImportPermission(permissions.BasePermission):
@@ -188,25 +429,60 @@ def has_permission(self, request, view):
# permission check takes place before validation, so we don't have access to serializer.validated_data()
# and we have to validate ourselves unfortunately
- _, _, _, engagement_id, engagement_name, product_name, product_type_name, auto_create_context = get_import_meta_data_from_dict(request.data)
+ (
+ _,
+ _,
+ _,
+ engagement_id,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = get_import_meta_data_from_dict(request.data)
product_type = get_target_product_type_if_exists(product_type_name)
product = get_target_product_if_exists(product_name, product_type_name)
- engagement = get_target_engagement_if_exists(engagement_id, engagement_name, product)
+ engagement = get_target_engagement_if_exists(
+ engagement_id, engagement_name, product
+ )
if engagement:
# existing engagement, nothing special to check
- return user_has_permission(request.user, engagement, Permissions.Import_Scan_Result)
+ return user_has_permission(
+ request.user, engagement, Permissions.Import_Scan_Result
+ )
elif engagement_id:
# engagement_id doesn't exist
- raise serializers.ValidationError("Engagement '%s' doesn''t exist" % engagement_id)
+ raise serializers.ValidationError(
+ "Engagement '%s' doesn''t exist" % engagement_id
+ )
if not auto_create_context:
- raise_no_auto_create_import_validation_error(None, None, engagement_name, product_name, product_type_name, engagement, product, product_type,
- "Need engagement_id or product_name + engagement_name to perform import")
+ raise_no_auto_create_import_validation_error(
+ None,
+ None,
+ engagement_name,
+ product_name,
+ product_type_name,
+ engagement,
+ product,
+ product_type,
+ "Need engagement_id or product_name + engagement_name to perform import",
+ )
else:
- # the engagement doesn't exist, so we need to check if the user has requested and is allowed to use auto_create
- return check_auto_create_permission(request.user, product, product_name, engagement, engagement_name, product_type, product_type_name,
- "Need engagement_id or product_name + engagement_name to perform import")
+ # the engagement doesn't exist, so we need to check if the user has
+ # requested and is allowed to use auto_create
+ return check_auto_create_permission(
+ request.user,
+ product,
+ product_name,
+ engagement,
+ engagement_name,
+ product_type,
+ product_type_name,
+ "Need engagement_id or product_name + engagement_name to perform import",
+ )
class UserHasMetaImportPermission(permissions.BasePermission):
@@ -214,7 +490,18 @@ def has_permission(self, request, view):
# permission check takes place before validation, so we don't have access to serializer.validated_data()
# and we have to validate ourselves unfortunately
- _, _, _, _, _, product_name, _, _ = get_import_meta_data_from_dict(request.data)
+ (
+ _,
+ _,
+ _,
+ _,
+ _,
+ product_name,
+ _,
+ _,
+ _,
+ _,
+ ) = get_import_meta_data_from_dict(request.data)
product = get_target_product_if_exists(product_name)
if not product:
product_id = get_product_id_from_dict(request.data)
@@ -222,63 +509,126 @@ def has_permission(self, request, view):
if product:
# existing product, nothing special to check
- return user_has_permission(request.user, product, Permissions.Import_Scan_Result)
+ return user_has_permission(
+ request.user, product, Permissions.Import_Scan_Result
+ )
elif product_id:
# product_id doesn't exist
- raise serializers.ValidationError("product '%s' doesn''t exist" % product_id)
+ raise serializers.ValidationError(
+ "product '%s' doesn''t exist" % product_id
+ )
else:
- raise serializers.ValidationError("Need product_id or product_name to perform import")
+ raise serializers.ValidationError(
+ "Need product_id or product_name to perform import"
+ )
class UserHasProductPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product_Type, 'prod_type', Permissions.Product_Type_Add_Product)
+ return check_post_permission(
+ request,
+ Product_Type,
+ "prod_type",
+ Permissions.Product_Type_Add_Product,
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_View,
+ Permissions.Product_Edit,
+ Permissions.Product_Delete,
+ )
class UserHasProductMemberPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Product_Manage_Members)
+ return check_post_permission(
+ request, Product, "product", Permissions.Product_Manage_Members
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_View, Permissions.Product_Manage_Members, Permissions.Product_Member_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_View,
+ Permissions.Product_Manage_Members,
+ Permissions.Product_Member_Delete,
+ )
class UserHasProductGroupPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Product_Group_Add)
+ return check_post_permission(
+ request, Product, "product", Permissions.Product_Group_Add
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_Group_View, Permissions.Product_Group_Edit, Permissions.Product_Group_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_Group_View,
+ Permissions.Product_Group_Edit,
+ Permissions.Product_Group_Delete,
+ )
class UserHasProductTypePermission(permissions.BasePermission):
def has_permission(self, request, view):
- if request.method == 'POST':
- return user_has_global_permission(request.user, Permissions.Product_Type_Add)
+ if request.method == "POST":
+ return user_has_global_permission(
+ request.user, Permissions.Product_Type_Add
+ )
else:
return True
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_Type_View, Permissions.Product_Type_Edit, Permissions.Product_Type_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_Type_View,
+ Permissions.Product_Type_Edit,
+ Permissions.Product_Type_Delete,
+ )
class UserHasProductTypeMemberPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product_Type, 'product_type', Permissions.Product_Type_Manage_Members)
+ return check_post_permission(
+ request,
+ Product_Type,
+ "product_type",
+ Permissions.Product_Type_Manage_Members,
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_Type_View, Permissions.Product_Type_Manage_Members, Permissions.Product_Type_Member_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_Type_View,
+ Permissions.Product_Type_Manage_Members,
+ Permissions.Product_Type_Member_Delete,
+ )
class UserHasProductTypeGroupPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product_Type, 'product_type', Permissions.Product_Type_Group_Add)
+ return check_post_permission(
+ request,
+ Product_Type,
+ "product_type",
+ Permissions.Product_Type_Group_Add,
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_Type_Group_View, Permissions.Product_Type_Group_Edit, Permissions.Product_Type_Group_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_Type_Group_View,
+ Permissions.Product_Type_Group_Edit,
+ Permissions.Product_Type_Group_Delete,
+ )
class UserHasReimportPermission(permissions.BasePermission):
@@ -286,89 +636,178 @@ def has_permission(self, request, view):
# permission check takes place before validation, so we don't have access to serializer.validated_data()
# and we have to validate ourselves unfortunately
- test_id, test_title, scan_type, _, engagement_name, product_name, product_type_name, auto_create_context = get_import_meta_data_from_dict(request.data)
+ (
+ test_id,
+ test_title,
+ scan_type,
+ _,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = get_import_meta_data_from_dict(request.data)
product_type = get_target_product_type_if_exists(product_type_name)
product = get_target_product_if_exists(product_name, product_type_name)
- engagement = get_target_engagement_if_exists(None, engagement_name, product)
- test = get_target_test_if_exists(test_id, test_title, scan_type, engagement)
+ engagement = get_target_engagement_if_exists(
+ None, engagement_name, product
+ )
+ test = get_target_test_if_exists(
+ test_id, test_title, scan_type, engagement
+ )
if test:
# existing test, nothing special to check
- return user_has_permission(request.user, test, Permissions.Import_Scan_Result)
+ return user_has_permission(
+ request.user, test, Permissions.Import_Scan_Result
+ )
elif test_id:
# test_id doesn't exist
- raise serializers.ValidationError("Test '%s' doesn't exist" % test_id)
+ raise serializers.ValidationError(
+ "Test '%s' doesn't exist" % test_id
+ )
if not auto_create_context:
- raise_no_auto_create_import_validation_error(test_title, scan_type, engagement_name, product_name, product_type_name, engagement, product, product_type,
- "Need test_id or product_name + engagement_name + scan_type to perform reimport")
+ raise_no_auto_create_import_validation_error(
+ test_title,
+ scan_type,
+ engagement_name,
+ product_name,
+ product_type_name,
+ engagement,
+ product,
+ product_type,
+ "Need test_id or product_name + engagement_name + scan_type to perform reimport",
+ )
else:
- # the test doesn't exist, so we need to check if the user has requested and is allowed to use auto_create
- return check_auto_create_permission(request.user, product, product_name, engagement, engagement_name, product_type, product_type_name,
- "Need test_id or product_name + engagement_name + scan_type to perform reimport")
+ # the test doesn't exist, so we need to check if the user has
+ # requested and is allowed to use auto_create
+ return check_auto_create_permission(
+ request.user,
+ product,
+ product_name,
+ engagement,
+ engagement_name,
+ product_type,
+ product_type_name,
+ "Need test_id or product_name + engagement_name + scan_type to perform reimport",
+ )
class UserHasTestPermission(permissions.BasePermission):
# Permission checks for related objects (like notes or metadata) can be moved
    # into a separate class once the legacy authorization is removed.
- path_tests_post = re.compile(r'^/api/v2/tests/$')
- path_tests = re.compile(r'^/api/v2/tests/\d+/$')
+ path_tests_post = re.compile(r"^/api/v2/tests/$")
+ path_tests = re.compile(r"^/api/v2/tests/\d+/$")
def has_permission(self, request, view):
- if UserHasTestPermission.path_tests_post.match(request.path) or \
- UserHasTestPermission.path_tests.match(request.path):
- return check_post_permission(request, Engagement, 'engagement', Permissions.Test_Add)
+ if UserHasTestPermission.path_tests_post.match(
+ request.path
+ ) or UserHasTestPermission.path_tests.match(request.path):
+ return check_post_permission(
+ request, Engagement, "engagement", Permissions.Test_Add
+ )
else:
            # related objects only need object permission
return True
def has_object_permission(self, request, view, obj):
- if UserHasTestPermission.path_tests_post.match(request.path) or \
- UserHasTestPermission.path_tests.match(request.path):
- return check_object_permission(request, obj, Permissions.Test_View, Permissions.Test_Edit, Permissions.Test_Delete)
+ if UserHasTestPermission.path_tests_post.match(
+ request.path
+ ) or UserHasTestPermission.path_tests.match(request.path):
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Test_View,
+ Permissions.Test_Edit,
+ Permissions.Test_Delete,
+ )
else:
- return check_object_permission(request, obj, Permissions.Test_View, Permissions.Test_Edit, Permissions.Test_Edit, Permissions.Test_Edit)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Test_View,
+ Permissions.Test_Edit,
+ Permissions.Test_Edit,
+ Permissions.Test_Edit,
+ )
class UserHasTestImportPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Test, 'test', Permissions.Test_Edit)
+ return check_post_permission(
+ request, Test, "test", Permissions.Test_Edit
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj.test, Permissions.Test_View, Permissions.Test_Edit, Permissions.Test_Delete)
+ return check_object_permission(
+ request,
+ obj.test,
+ Permissions.Test_View,
+ Permissions.Test_Edit,
+ Permissions.Test_Delete,
+ )
class UserHasLanguagePermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Language_Add)
+ return check_post_permission(
+ request, Product, "product", Permissions.Language_Add
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Language_View, Permissions.Language_Edit, Permissions.Language_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Language_View,
+ Permissions.Language_Edit,
+ Permissions.Language_Delete,
+ )
class UserHasProductAPIScanConfigurationPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Product_API_Scan_Configuration_Add)
+ return check_post_permission(
+ request,
+ Product,
+ "product",
+ Permissions.Product_API_Scan_Configuration_Add,
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj, Permissions.Product_API_Scan_Configuration_View, Permissions.Product_API_Scan_Configuration_Edit, Permissions.Product_API_Scan_Configuration_Delete)
+ return check_object_permission(
+ request,
+ obj,
+ Permissions.Product_API_Scan_Configuration_View,
+ Permissions.Product_API_Scan_Configuration_Edit,
+ Permissions.Product_API_Scan_Configuration_Delete,
+ )
class UserHasJiraProductPermission(permissions.BasePermission):
def has_permission(self, request, view):
- if request.method == 'POST':
+ if request.method == "POST":
has_permission_result = True
- engagement_id = request.data.get('engagement', None)
+ engagement_id = request.data.get("engagement", None)
if engagement_id:
object = get_object_or_404(Engagement, pk=engagement_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Engagement_Edit)
- product_id = request.data.get('product', None)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Engagement_Edit
+ )
+ )
+ product_id = request.data.get("product", None)
if product_id:
object = get_object_or_404(Product, pk=product_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Product_Edit)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Product_Edit
+ )
+ )
return has_permission_result
else:
return True
@@ -377,34 +816,62 @@ def has_object_permission(self, request, view, obj):
has_permission_result = True
engagement = obj.engagement
if engagement:
- has_permission_result = has_permission_result and \
- check_object_permission(request, engagement, Permissions.Engagement_View, Permissions.Engagement_Edit, Permissions.Engagement_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ engagement,
+ Permissions.Engagement_View,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Edit,
+ )
+ )
product = obj.product
if product:
- has_permission_result = has_permission_result and \
- check_object_permission(request, product, Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ product,
+ Permissions.Product_View,
+ Permissions.Product_Edit,
+ Permissions.Product_Edit,
+ )
+ )
return has_permission_result
class UserHasJiraIssuePermission(permissions.BasePermission):
def has_permission(self, request, view):
- if request.method == 'POST':
+ if request.method == "POST":
has_permission_result = True
- engagement_id = request.data.get('engagement', None)
+ engagement_id = request.data.get("engagement", None)
if engagement_id:
object = get_object_or_404(Engagement, pk=engagement_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Engagement_Edit)
- finding_id = request.data.get('finding', None)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Engagement_Edit
+ )
+ )
+ finding_id = request.data.get("finding", None)
if finding_id:
object = get_object_or_404(Finding, pk=finding_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Finding_Edit)
- finding_group_id = request.data.get('finding_group', None)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Finding_Edit
+ )
+ )
+ finding_group_id = request.data.get("finding_group", None)
if finding_group_id:
object = get_object_or_404(Finding_Group, pk=finding_group_id)
- has_permission_result = has_permission_result and \
- user_has_permission(request.user, object, Permissions.Finding_Group_Edit)
+ has_permission_result = (
+ has_permission_result
+ and user_has_permission(
+ request.user, object, Permissions.Finding_Group_Edit
+ )
+ )
return has_permission_result
else:
return True
@@ -413,16 +880,40 @@ def has_object_permission(self, request, view, obj):
has_permission_result = True
engagement = obj.engagement
if engagement:
- has_permission_result = has_permission_result and \
- check_object_permission(request, engagement, Permissions.Engagement_View, Permissions.Engagement_Edit, Permissions.Engagement_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ engagement,
+ Permissions.Engagement_View,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Edit,
+ )
+ )
finding = obj.finding
if finding:
- has_permission_result = has_permission_result and \
- check_object_permission(request, finding, Permissions.Finding_View, Permissions.Finding_Edit, Permissions.Finding_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ finding,
+ Permissions.Finding_View,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Edit,
+ )
+ )
finding_group = obj.finding_group
if finding_group:
- has_permission_result = has_permission_result and \
- check_object_permission(request, finding_group, Permissions.Finding_Group_View, Permissions.Finding_Group_Edit, Permissions.Finding_Group_Edit)
+ has_permission_result = (
+ has_permission_result
+ and check_object_permission(
+ request,
+ finding_group,
+ Permissions.Finding_Group_View,
+ Permissions.Finding_Group_Edit,
+ Permissions.Finding_Group_Edit,
+ )
+ )
return has_permission_result
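
Permission classes like `UserHasJiraIssuePermission` are meant to be attached to DRF viewsets, where `has_permission` gates the request payload checks and `has_object_permission` gates the specific JIRA mapping. A hedged wiring sketch (the viewset name is illustrative and the serializer is omitted):

```python
# Hypothetical viewset wiring for UserHasJiraIssuePermission.
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated

from dojo.api_v2 import permissions as dojo_permissions
from dojo.models import JIRA_Issue


class JiraIssuesViewSet(viewsets.ModelViewSet):
    # serializer_class is omitted in this sketch.
    queryset = JIRA_Issue.objects.all()
    permission_classes = (
        IsAuthenticated,
        dojo_permissions.UserHasJiraIssuePermission,
    )
```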
@@ -433,13 +924,32 @@ def has_permission(self, request, view):
class UserHasEngagementPresetPermission(permissions.BasePermission):
def has_permission(self, request, view):
- return check_post_permission(request, Product, 'product', Permissions.Product_Edit)
+ return check_post_permission(
+ request, Product, "product", Permissions.Product_Edit
+ )
def has_object_permission(self, request, view, obj):
- return check_object_permission(request, obj.product, Permissions.Product_View, Permissions.Product_Edit, Permissions.Product_Edit, Permissions.Product_Edit)
-
-
-def raise_no_auto_create_import_validation_error(test_title, scan_type, engagement_name, product_name, product_type_name, engagement, product, product_type, error_message):
+ return check_object_permission(
+ request,
+ obj.product,
+ Permissions.Product_View,
+ Permissions.Product_Edit,
+ Permissions.Product_Edit,
+ Permissions.Product_Edit,
+ )
+
+
+def raise_no_auto_create_import_validation_error(
+ test_title,
+ scan_type,
+ engagement_name,
+ product_name,
+ product_type_name,
+ engagement,
+ product,
+ product_type,
+ error_message,
+):
# check for mandatory fields first
if not product_name:
raise ValidationError("product_name parameter missing")
@@ -448,28 +958,53 @@ def raise_no_auto_create_import_validation_error(test_title, scan_type, engageme
raise ValidationError("engagement_name parameter missing")
if product_type_name and not product_type:
- raise serializers.ValidationError("Product Type '%s' doesn't exist" % (product_type_name))
+ raise serializers.ValidationError(
+ "Product Type '%s' doesn't exist" % (product_type_name)
+ )
if product_name and not product:
if product_type_name:
- raise serializers.ValidationError("Product '%s' doesn't exist in Product_Type '%s'" % (product_name, product_type_name))
+ raise serializers.ValidationError(
+ "Product '%s' doesn't exist in Product_Type '%s'"
+ % (product_name, product_type_name)
+ )
else:
- raise serializers.ValidationError("Product '%s' doesn't exist" % product_name)
+ raise serializers.ValidationError(
+ "Product '%s' doesn't exist" % product_name
+ )
if engagement_name and not engagement:
- raise serializers.ValidationError("Engagement '%s' doesn't exist in Product '%s'" % (engagement_name, product_name))
+ raise serializers.ValidationError(
+ "Engagement '%s' doesn't exist in Product '%s'"
+ % (engagement_name, product_name)
+ )
# these are only set for reimport
if test_title:
- raise serializers.ValidationError("Test '%s' with scan_type '%s' doesn't exist in Engagement '%s'" % (test_title, scan_type, engagement_name))
+ raise serializers.ValidationError(
+ "Test '%s' with scan_type '%s' doesn't exist in Engagement '%s'"
+ % (test_title, scan_type, engagement_name)
+ )
if scan_type:
- raise serializers.ValidationError("Test with scan_type '%s' doesn't exist in Engagement '%s'" % (scan_type, engagement_name))
+ raise serializers.ValidationError(
+ "Test with scan_type '%s' doesn't exist in Engagement '%s'"
+ % (scan_type, engagement_name)
+ )
raise ValidationError(error_message)
-def check_auto_create_permission(user, product, product_name, engagement, engagement_name, product_type, product_type_name, error_message):
+def check_auto_create_permission(
+ user,
+ product,
+ product_name,
+ engagement,
+ engagement_name,
+ product_type,
+ product_type_name,
+ error_message,
+):
"""
For an existing engagement, to be allowed to import a scan, the following must all be True:
- User must have Import_Scan_Result permission for this Engagement
@@ -493,30 +1028,54 @@ def check_auto_create_permission(user, product, product_name, engagement, engage
if engagement:
# existing engagement, nothing special to check
- return user_has_permission(user, engagement, Permissions.Import_Scan_Result)
+ return user_has_permission(
+ user, engagement, Permissions.Import_Scan_Result
+ )
if product and product_name and engagement_name:
if not user_has_permission(user, product, Permissions.Engagement_Add):
- raise PermissionDenied("No permission to create engagements in product '%s'", product_name)
-
- if not user_has_permission(user, product, Permissions.Import_Scan_Result):
- raise PermissionDenied("No permission to import scans into product '%s'", product_name)
+ raise PermissionDenied(
+ "No permission to create engagements in product '%s'"
+ % product_name
+ )
+
+ if not user_has_permission(
+ user, product, Permissions.Import_Scan_Result
+ ):
+ raise PermissionDenied(
+ "No permission to import scans into product '%s'"
+ % product_name
+ )
# all good
return True
if not product and product_name:
if not product_type_name:
- raise serializers.ValidationError("Product '%s' doesn't exist and no product_type_name provided to create the new product in" % product_name)
+ raise serializers.ValidationError(
+ "Product '%s' doesn't exist and no product_type_name provided to create the new product in"
+ % product_name
+ )
if not product_type:
- if not user_has_global_permission(user, Permissions.Product_Type_Add):
- raise PermissionDenied("No permission to create product_type '%s'", product_type_name)
- # new product type can be created with current user as owner, so all objects in it can be created as well
+ if not user_has_global_permission(
+ user, Permissions.Product_Type_Add
+ ):
+ raise PermissionDenied(
+ "No permission to create product_type '%s'"
+ % product_type_name
+ )
+ # new product type can be created with current user as owner, so
+ # all objects in it can be created as well
return True
else:
- if not user_has_permission(user, product_type, Permissions.Product_Type_Add_Product):
- raise PermissionDenied("No permission to create products in product_type '%s'", product_type)
+ if not user_has_permission(
+ user, product_type, Permissions.Product_Type_Add_Product
+ ):
+ raise PermissionDenied(
+ "No permission to create products in product_type '%s'"
+ % product_type
+ )
# product can be created, so objects in it can be created as well
return True
@@ -525,20 +1084,34 @@ def check_auto_create_permission(user, product, product_name, engagement, engage
class UserHasConfigurationPermissionStaff(permissions.DjangoModelPermissions):
+ # Override map to also provide 'view' permissions
+ perms_map = {
+ "GET": ["%(app_label)s.view_%(model_name)s"],
+ "OPTIONS": [],
+ "HEAD": [],
+ "POST": ["%(app_label)s.add_%(model_name)s"],
+ "PUT": ["%(app_label)s.change_%(model_name)s"],
+ "PATCH": ["%(app_label)s.change_%(model_name)s"],
+ "DELETE": ["%(app_label)s.delete_%(model_name)s"],
+ }
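+ # Illustrative: with app label "dojo" and a hypothetical model "tool_type",
+ # a GET request would require the "dojo.view_tool_type" permission; stock
+ # DjangoModelPermissions leaves GET unchecked.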
+
+ def has_permission(self, request, view):
+ return super().has_permission(request, view)
+
+class UserHasConfigurationPermissionSuperuser(
+ permissions.DjangoModelPermissions
+):
# Override map to also provide 'view' permissions
perms_map = {
- 'GET': ['%(app_label)s.view_%(model_name)s'],
- 'OPTIONS': [],
- 'HEAD': [],
- 'POST': ['%(app_label)s.add_%(model_name)s'],
- 'PUT': ['%(app_label)s.change_%(model_name)s'],
- 'PATCH': ['%(app_label)s.change_%(model_name)s'],
- 'DELETE': ['%(app_label)s.delete_%(model_name)s'],
+ "GET": ["%(app_label)s.view_%(model_name)s"],
+ "OPTIONS": [],
+ "HEAD": [],
+ "POST": ["%(app_label)s.add_%(model_name)s"],
+ "PUT": ["%(app_label)s.change_%(model_name)s"],
+ "PATCH": ["%(app_label)s.change_%(model_name)s"],
+ "DELETE": ["%(app_label)s.delete_%(model_name)s"],
}
def has_permission(self, request, view):
- if settings.FEATURE_CONFIGURATION_AUTHORIZATION:
- return super().has_permission(request, view)
- else:
- return request.user.is_staff
+ return super().has_permission(request, view)
diff --git a/dojo/api_v2/prefetch/__init__.py b/dojo/api_v2/prefetch/__init__.py
index 6a4a3384843..f0449c7b303 100644
--- a/dojo/api_v2/prefetch/__init__.py
+++ b/dojo/api_v2/prefetch/__init__.py
@@ -1,4 +1,4 @@
from .mixins import PrefetchListMixin, PrefetchRetrieveMixin
from .schema import get_prefetch_schema
-__all__ = ['PrefetchListMixin', 'PrefetchRetrieveMixin', 'get_prefetch_schema']
+__all__ = ["PrefetchListMixin", "PrefetchRetrieveMixin", "get_prefetch_schema"]
diff --git a/dojo/api_v2/prefetch/mixins.py b/dojo/api_v2/prefetch/mixins.py
index 198c52994df..b43a44c7d7c 100644
--- a/dojo/api_v2/prefetch/mixins.py
+++ b/dojo/api_v2/prefetch/mixins.py
@@ -8,7 +8,8 @@ def list(self, request, *args, **kwargs):
prefetch_params = request.GET.get("prefetch", "").split(",")
prefetcher = _Prefetcher()
- # Apply the same operations as the standard list method defined in the django rest framework
+ # Apply the same operations as the standard list method defined in
+ # Django REST framework
queryset = self.filter_queryset(self.get_queryset())
queryset = self.paginate_queryset(queryset)
diff --git a/dojo/api_v2/prefetch/prefetcher.py b/dojo/api_v2/prefetch/prefetcher.py
index 9da142a9711..5d228165f59 100644
--- a/dojo/api_v2/prefetch/prefetcher.py
+++ b/dojo/api_v2/prefetch/prefetcher.py
@@ -7,7 +7,7 @@
SERIALIZER_DEFS_MODULE = "dojo.api_v2.serializers"
-class _Prefetcher():
+class _Prefetcher:
@staticmethod
def _build_serializers():
"""Returns a map model -> serializer where model is a django model and serializer is the corresponding
@@ -16,13 +16,16 @@ def _build_serializers():
Returns:
dict[model, serializer]: map of model to their serializer
"""
+
def _is_model_serializer(obj):
return inspect.isclass(obj) and issubclass(obj, ModelSerializer)
serializers = dict()
# We process all the serializers found in the module SERIALIZER_DEFS_MODULE. We restrict the scope to avoid
# processing all the classes in the symbol table
- available_serializers = inspect.getmembers(sys.modules[SERIALIZER_DEFS_MODULE], _is_model_serializer)
+ available_serializers = inspect.getmembers(
+ sys.modules[SERIALIZER_DEFS_MODULE], _is_model_serializer
+ )
for _, serializer in available_serializers:
model = serializer.Meta.model
@@ -80,9 +83,13 @@ def _prefetch(self, entry, fields_to_fetch):
# Check if the field represents a many-to-many relationship as we need to instantiate
# the serializer accordingly
many = utils._is_many_to_many_relation(field_meta)
- field_data = extra_serializer(many=many).to_representation(field_value)
+ field_data = extra_serializer(many=many).to_representation(
+ field_value
+ )
# For convenience in processing we store the field data in a list
- field_data_list = field_data if type(field_data) is list else [field_data]
+ field_data_list = (
+ field_data if isinstance(field_data, list) else [field_data]
+ )
if field_to_fetch not in self._prefetch_data:
self._prefetch_data[field_to_fetch] = dict()
diff --git a/dojo/api_v2/prefetch/schema.py b/dojo/api_v2/prefetch/schema.py
index 568e45398b5..6d04e751800 100644
--- a/dojo/api_v2/prefetch/schema.py
+++ b/dojo/api_v2/prefetch/schema.py
@@ -6,47 +6,99 @@
def get_prefetch_schema(methods, serializer):
- """ Swagger / OpenAPI v2 (drf-yasg) Return a composable swagger schema that contains in the query the fields that can be prefetch from the model
- supported by the serializer and in the reponse the structure of these fields in a new top-level attribute
- named prefetch.
+ """Swagger / OpenAPI v2 (drf-yasg) Return a composable swagger schema that contains in the query the fields that can be prefetch from the model
+ supported by the serializer and in the reponse the structure of these fields in a new top-level attribute
+ named prefetch.
- Returns:
- ComposableSchema: A swagger schema
+ Returns:
+ ComposableSchema: A swagger schema
"""
prefetcher = _Prefetcher()
fields = _get_prefetchable_fields(serializer())
- field_to_serializer = dict([(name, prefetcher._find_serializer(field_type)) for name, field_type in fields if prefetcher._find_serializer(field_type)])
- fields_to_refname = dict([(name, utils.get_serializer_ref_name(serializer())) for name, serializer in field_to_serializer.items()])
- fields_name = [name for name, field_type in fields if prefetcher._find_serializer(field_type)]
+ field_to_serializer = dict(
+ [
+ (name, prefetcher._find_serializer(field_type))
+ for name, field_type in fields
+ if prefetcher._find_serializer(field_type)
+ ]
+ )
+ fields_to_refname = dict(
+ [
+ (name, utils.get_serializer_ref_name(serializer()))
+ for name, serializer in field_to_serializer.items()
+ ]
+ )
+ fields_name = [
+ name
+ for name, field_type in fields
+ if prefetcher._find_serializer(field_type)
+ ]
# New openapi parameter corresponding to the prefetchable fields
- prefetch_params = [openapi.Parameter("prefetch", in_=openapi.IN_QUERY, required=False, type=openapi.TYPE_ARRAY, items=openapi.Items(type=openapi.TYPE_STRING, enum=fields_name))]
-
- additional_props = dict([(name, openapi.Schema(type=openapi.TYPE_OBJECT, read_only=True, additional_properties=LazySchemaRef(fields_to_refname[name], True))) for name in fields_name])
- prefetch_response = {"200": {"prefetch": openapi.Schema(type=openapi.TYPE_OBJECT, properties=additional_props)}}
+ prefetch_params = [
+ openapi.Parameter(
+ "prefetch",
+ in_=openapi.IN_QUERY,
+ required=False,
+ type=openapi.TYPE_ARRAY,
+ items=openapi.Items(type=openapi.TYPE_STRING, enum=fields_name),
+ )
+ ]
+
+ additional_props = dict(
+ [
+ (
+ name,
+ openapi.Schema(
+ type=openapi.TYPE_OBJECT,
+ read_only=True,
+ additional_properties=LazySchemaRef(
+ fields_to_refname[name], True
+ ),
+ ),
+ )
+ for name in fields_name
+ ]
+ )
+ prefetch_response = {
+ "200": {
+ "prefetch": openapi.Schema(
+ type=openapi.TYPE_OBJECT, properties=additional_props
+ )
+ }
+ }
schema = extra_schema.IdentitySchema()
for method in methods:
- schema = schema.composeWith(extra_schema.ExtraParameters(method, prefetch_params))
- schema = schema.composeWith(extra_schema.ExtraResponseField(method, prefetch_response))
+ schema = schema.composeWith(
+ extra_schema.ExtraParameters(method, prefetch_params)
+ )
+ schema = schema.composeWith(
+ extra_schema.ExtraResponseField(method, prefetch_response)
+ )
return schema
def _get_path_to_GET_serializer_map(generator):
path_to_GET_serializer = dict()
- for path, path_pattern, method, view in generator._get_paths_and_endpoints():
+ for (
+ path,
+ path_pattern,
+ method,
+ view,
+ ) in generator._get_paths_and_endpoints():
# print(path, path_pattern, method, view)
- if method == 'GET':
- if hasattr(view, 'get_serializer_class'):
+ if method == "GET":
+ if hasattr(view, "get_serializer_class"):
path_to_GET_serializer[path] = view.get_serializer_class()
return path_to_GET_serializer
def prefetch_postprocessing_hook(result, generator, request, public):
- """ OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin.
+ """OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin.
These have nothing to do with Django prefetch_related.
The endpoints have an @extend_schema configured with an extra parameter 'prefetch'
This parameter contains an array of relations to prefetch. These prefetched models
@@ -56,30 +108,78 @@ def prefetch_postprocessing_hook(result, generator, request, public):
serializer_classes = _get_path_to_GET_serializer_map(generator)
- paths = result.get('paths', {})
+ paths = result.get("paths", {})
for path in paths:
- if 'get' in paths[path] and 'parameters' in paths[path]['get']:
- for parameter in paths[path]['get']['parameters']:
- if parameter['name'] == 'prefetch':
+ if "get" in paths[path] and "parameters" in paths[path]["get"]:
+ for parameter in paths[path]["get"]["parameters"]:
+ if parameter["name"] == "prefetch":
prefetcher = _Prefetcher()
- fields = _get_prefetchable_fields(serializer_classes[path]())
+ fields = _get_prefetchable_fields(
+ serializer_classes[path]()
+ )
- field_names = [name for name, field_type in fields if prefetcher._find_serializer(field_type)]
+ field_names = [
+ name
+ for name, field_type in fields
+ if prefetcher._find_serializer(field_type)
+ ]
- parameter['schema']['type'] = 'array'
- parameter['schema']['items'] = {
- 'type': "string",
- 'enum': field_names
+ parameter["schema"]["type"] = "array"
+ parameter["schema"]["items"] = {
+ "type": "string",
+ "enum": field_names,
}
- field_to_serializer = dict([(name, prefetcher._find_serializer(field_type)) for name, field_type in fields if prefetcher._find_serializer(field_type)])
- fields_to_refname = dict([(name, utils.get_serializer_ref_name(serializer()))
- for name, serializer in field_to_serializer.items()])
- properties = dict([(name, dict([("type", "object"), ("readOnly", True), ("additionalProperties", dict([("$ref", "#/components/schemas/" + fields_to_refname[name])]))]))
- for name in field_names])
- ref = paths[path]['get']['responses']['200']['content']['application/json']['schema']['$ref']
- component_name = ref.split('/')[-1]
- result['components']['schemas'][component_name]['properties']['prefetch'] = dict([("type", "object"), ("properties", properties)])
+ field_to_serializer = dict(
+ [
+ (name, prefetcher._find_serializer(field_type))
+ for name, field_type in fields
+ if prefetcher._find_serializer(field_type)
+ ]
+ )
+ fields_to_refname = dict(
+ [
+ (name, utils.get_serializer_ref_name(serializer()))
+ for name, serializer in field_to_serializer.items()
+ ]
+ )
+ properties = dict(
+ [
+ (
+ name,
+ dict(
+ [
+ ("type", "object"),
+ ("readOnly", True),
+ (
+ "additionalProperties",
+ dict(
+ [
+ (
+ "$ref",
+ "#/components/schemas/"
+ + fields_to_refname[
+ name
+ ],
+ )
+ ]
+ ),
+ ),
+ ]
+ ),
+ )
+ for name in field_names
+ ]
+ )
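+ # Illustrative shape produced per prefetched field:
+ # {"type": "object", "readOnly": true,
+ #  "additionalProperties": {"$ref": "#/components/schemas/<RefName>"}}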
+ ref = paths[path]["get"]["responses"]["200"]["content"][
+ "application/json"
+ ]["schema"]["$ref"]
+ component_name = ref.split("/")[-1]
+ result["components"]["schemas"][component_name][
+ "properties"
+ ]["prefetch"] = dict(
+ [("type", "object"), ("properties", properties)]
+ )
return result
diff --git a/dojo/api_v2/prefetch/utils.py b/dojo/api_v2/prefetch/utils.py
index f9b76c18f0e..833fe9ae6e4 100644
--- a/dojo/api_v2/prefetch/utils.py
+++ b/dojo/api_v2/prefetch/utils.py
@@ -36,8 +36,11 @@ def _get_prefetchable_fields(serializer):
Args:
serializer (Serializer): [description]
"""
+
def _is_field_prefetchable(field):
- return _is_one_to_one_relation(field) or _is_many_to_many_relation(field)
+ return _is_one_to_one_relation(field) or _is_many_to_many_relation(
+ field
+ )
meta = getattr(serializer, "Meta", None)
if meta is None:
@@ -52,7 +55,7 @@ def _is_field_prefetchable(field):
field = getattr(model, field_name)
if _is_field_prefetchable(field):
# ManyToMany relationship can be reverse
- if hasattr(field, 'reverse') and field.reverse:
+ if hasattr(field, "reverse") and field.reverse:
fields.append((field_name, field.field.model))
else:
fields.append((field_name, field.field.related_model))
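+ # For reverse relations the model that declares the M2M field is recorded;
+ # otherwise its related model is.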
diff --git a/dojo/api_v2/schema/__init__.py b/dojo/api_v2/schema/__init__.py
index cd8ea5bb23e..6a69a167022 100644
--- a/dojo/api_v2/schema/__init__.py
+++ b/dojo/api_v2/schema/__init__.py
@@ -1,10 +1,17 @@
-from .extra_schema import IdentitySchema, ExtraParameters, ExtraResponseField, ComposableSchema
+from .extra_schema import (
+ IdentitySchema,
+ ExtraParameters,
+ ExtraResponseField,
+ ComposableSchema,
+)
from .utils import LazySchemaRef, try_apply, resolve_lazy_ref
-__all__ = ['IdentitySchema',
- 'ExtraParameters',
- 'ExtraResponseField',
- 'ComposableSchema',
- 'LazySchemaRef',
- 'try_apply',
- 'resolve_lazy_ref']
+__all__ = [
+ "IdentitySchema",
+ "ExtraParameters",
+ "ExtraResponseField",
+ "ComposableSchema",
+ "LazySchemaRef",
+ "try_apply",
+ "resolve_lazy_ref",
+]
diff --git a/dojo/api_v2/schema/extra_schema.py b/dojo/api_v2/schema/extra_schema.py
index 3dc8e2ba4ac..86fd565e370 100644
--- a/dojo/api_v2/schema/extra_schema.py
+++ b/dojo/api_v2/schema/extra_schema.py
@@ -10,6 +10,7 @@ class ComposableSchema:
yielding a new composable schema whose transformation is defined as the function composition
of the transformation of the two source schema.
"""
+
def transform_operation(self, operation, resolver):
"""Defines an operation transformation
@@ -17,7 +18,6 @@ def transform_operation(self, operation, resolver):
operation (Operation): the operation to transform
resolver (Resolver): the schema refs resolver
"""
- pass
def composeWith(self, schema):
"""Allow two schema to be composed into a new schema.
@@ -36,7 +36,9 @@ def composeWith(self, schema):
class _Wrapper(ComposableSchema):
def transform_operation(self, operation, resolver):
- return schema.transform_operation(op(operation, resolver), resolver)
+ return schema.transform_operation(
+ op(operation, resolver), resolver
+ )
return _Wrapper()
@@ -66,8 +68,8 @@ def transform_operation(self, operation, resolver):
class ExtraParameters(ComposableSchema):
- """Define a schema that can add parameters to the operation
- """
+ """Define a schema that can add parameters to the operation"""
+
def __init__(self, operation_name, extra_parameters, *args, **kwargs):
"""Initialize the schema
@@ -90,8 +92,8 @@ def transform_operation(self, operation, resolver):
class ExtraResponseField(ComposableSchema):
- """Define a schema that can add fields to the responses of the operation
- """
+ """Define a schema that can add fields to the responses of the operation"""
+
def __init__(self, operation_name, extra_fields, *args, **kwargs):
"""Initialize the schema
@@ -123,10 +125,16 @@ def transform_operation(self, operation, resolver):
for code, params in self._extra_fields.items():
if code in responses:
original_schema = responses[code]["schema"]
- schema = original_schema if type(original_schema) is Schema else resolve_ref(original_schema, resolver)
+ schema = (
+ original_schema
+ if isinstance(original_schema, Schema)
+ else resolve_ref(original_schema, resolver)
+ )
schema = copy.deepcopy(schema)
for name, param in params.items():
- schema["properties"][name] = resolve_lazy_ref(param, resolver)
+ schema["properties"][name] = resolve_lazy_ref(
+ param, resolver
+ )
responses[code]["schema"] = schema
return operation
diff --git a/dojo/api_v2/schema/utils.py b/dojo/api_v2/schema/utils.py
index a036fa58281..1276202fc81 100644
--- a/dojo/api_v2/schema/utils.py
+++ b/dojo/api_v2/schema/utils.py
@@ -5,9 +5,12 @@ class LazySchemaRef:
"""Utility class to support SchemaRef definition without knowing the resolver.
The reference can be evaluated later in the context of a swagger generator
"""
+
def __init__(self, schema_name, ignore_unresolved=False):
# Bind curried version of the SchemaRef init
- self.schema_ref = lambda resolver: SchemaRef(resolver, schema_name, ignore_unresolved)
+ self.schema_ref = lambda resolver: SchemaRef(
+ resolver, schema_name, ignore_unresolved
+ )
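+ # The concrete SchemaRef is only materialized when apply() supplies a
+ # resolver, e.g. LazySchemaRef("Finding").apply(resolver) (illustrative name).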
def apply(self, resolver):
"""Resolve the LazySchemaRef with the given resolver
@@ -31,7 +34,7 @@ def try_apply(obj, resolver):
Returns:
object: the original object if it was not resolve otherwise the resolved LazySchemaRef
"""
- if type(obj) is LazySchemaRef:
+ if isinstance(obj, LazySchemaRef):
return obj.apply(resolver)
else:
return obj
@@ -46,13 +49,15 @@ def resolve_lazy_ref(schema, resolver):
Returns:
object: the schema without LazySchemaRef
"""
- if type(schema) is not Schema:
+ if not isinstance(schema, Schema):
return try_apply(schema, resolver)
if "properties" in schema:
for prop_name, prop in schema["properties"].items():
schema["properties"][prop_name] = resolve_lazy_ref(prop, resolver)
if "additionalProperties" in schema:
- schema["additionalProperties"] = resolve_lazy_ref(schema["additionalProperties"], resolver)
+ schema["additionalProperties"] = resolve_lazy_ref(
+ schema["additionalProperties"], resolver
+ )
return schema
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index fc9cbdd256d..5778f2147ca 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -1,3 +1,5 @@
+from dojo.group.utils import get_auth_group_name
+from django.contrib.auth.models import Group
from typing import List
from drf_spectacular.utils import extend_schema_field
from drf_yasg.utils import swagger_serializer_method
@@ -5,39 +7,120 @@
from rest_framework.fields import DictField, MultipleChoiceField
from datetime import datetime
from dojo.endpoint.utils import endpoint_filter
-from dojo.importers.reimporter.utils import get_or_create_engagement, get_target_engagement_if_exists, get_target_product_by_id_if_exists, \
- get_target_product_if_exists, get_target_test_if_exists
-from dojo.models import IMPORT_ACTIONS, SEVERITIES, STATS_FIELDS, Dojo_User, Finding_Group, Product, Engagement, Test, Finding, \
- User, Stub_Finding, Risk_Acceptance, \
- Finding_Template, Test_Type, Development_Environment, NoteHistory, \
- JIRA_Issue, Tool_Product_Settings, Tool_Configuration, Tool_Type, \
- Product_Type, JIRA_Instance, Endpoint, JIRA_Project, \
- Notes, DojoMeta, Note_Type, App_Analysis, Endpoint_Status, \
- Sonarqube_Issue, Sonarqube_Issue_Transition, \
- Regulation, System_Settings, FileUpload, SEVERITY_CHOICES, Test_Import, \
- Test_Import_Finding_Action, Product_Type_Member, Product_Member, \
- Product_Group, Product_Type_Group, Dojo_Group, Role, Global_Role, Dojo_Group_Member, \
- Language_Type, Languages, Notifications, NOTIFICATION_CHOICES, Engagement_Presets, \
- Network_Locations, UserContactInfo, Product_API_Scan_Configuration
-
-from dojo.tools.factory import requires_file, get_choices_sorted, requires_tool_type
+from dojo.importers.reimporter.utils import (
+ get_or_create_engagement,
+ get_target_engagement_if_exists,
+ get_target_product_by_id_if_exists,
+ get_target_product_if_exists,
+ get_target_test_if_exists,
+)
+from dojo.models import (
+ IMPORT_ACTIONS,
+ SEVERITIES,
+ SLA_Configuration,
+ STATS_FIELDS,
+ Dojo_User,
+ Finding_Group,
+ Product,
+ Engagement,
+ Test,
+ Finding,
+ User,
+ Stub_Finding,
+ Risk_Acceptance,
+ Finding_Template,
+ Test_Type,
+ Development_Environment,
+ NoteHistory,
+ JIRA_Issue,
+ Tool_Product_Settings,
+ Tool_Configuration,
+ Tool_Type,
+ Product_Type,
+ JIRA_Instance,
+ Endpoint,
+ JIRA_Project,
+ Cred_Mapping,
+ Notes,
+ DojoMeta,
+ Note_Type,
+ App_Analysis,
+ Endpoint_Status,
+ Cred_User,
+ Sonarqube_Issue,
+ Sonarqube_Issue_Transition,
+ Endpoint_Params,
+ Regulation,
+ System_Settings,
+ FileUpload,
+ SEVERITY_CHOICES,
+ Test_Import,
+ Test_Import_Finding_Action,
+ Product_Type_Member,
+ Product_Member,
+ Product_Group,
+ Product_Type_Group,
+ Dojo_Group,
+ Role,
+ Global_Role,
+ Dojo_Group_Member,
+ Language_Type,
+ Languages,
+ Notifications,
+ NOTIFICATION_CHOICES,
+ Engagement_Presets,
+ Network_Locations,
+ UserContactInfo,
+ Product_API_Scan_Configuration,
+ DEFAULT_NOTIFICATION,
+ Vulnerability_Id,
+ Vulnerability_Id_Template,
+ get_current_date,
+ Question,
+ TextQuestion,
+ ChoiceQuestion,
+ Answer,
+ TextAnswer,
+ ChoiceAnswer,
+ Engagement_Survey,
+ Answered_Survey,
+ General_Survey,
+ Check_List,
+ Announcement,
+)
+
+from dojo.tools.factory import (
+ requires_file,
+ get_choices_sorted,
+ requires_tool_type,
+)
from dojo.utils import is_scan_file_too_large
from django.conf import settings
from rest_framework import serializers
from django.core.exceptions import ValidationError, PermissionDenied
from django.contrib.auth.password_validation import validate_password
+from django.contrib.auth.models import Permission
from django.utils import timezone
+from django.urls import reverse
+from django.db.utils import IntegrityError
import six
-from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
import json
import dojo.jira_link.helper as jira_helper
import logging
import tagulous
from dojo.endpoint.utils import endpoint_meta_import
from dojo.importers.importer.importer import DojoDefaultImporter as Importer
-from dojo.importers.reimporter.reimporter import DojoDefaultReImporter as ReImporter
+from dojo.importers.reimporter.reimporter import (
+ DojoDefaultReImporter as ReImporter,
+)
from dojo.authorization.authorization import user_has_permission
from dojo.authorization.roles_permissions import Permissions
+from dojo.finding.helper import (
+ save_vulnerability_ids,
+ save_vulnerability_ids_template,
+)
+from dojo.user.utils import get_configuration_permissions_codenames
logger = logging.getLogger(__name__)
@@ -45,41 +128,56 @@
def get_import_meta_data_from_dict(data):
- test_id = data.get('test', None)
+ test_id = data.get("test", None)
if test_id:
if isinstance(test_id, Test):
test_id = test_id.id
elif isinstance(test_id, str) and not test_id.isdigit():
- raise serializers.ValidationError('test must be an integer')
+ raise serializers.ValidationError("test must be an integer")
- scan_type = data.get('scan_type', None)
+ scan_type = data.get("scan_type", None)
- test_title = data.get('test_title', None)
+ test_title = data.get("test_title", None)
- engagement_id = data.get('engagement', None)
+ engagement_id = data.get("engagement", None)
if engagement_id:
if isinstance(engagement_id, Engagement):
engagement_id = engagement_id.id
elif isinstance(engagement_id, str) and not engagement_id.isdigit():
- raise serializers.ValidationError('engagement must be an integer')
+ raise serializers.ValidationError("engagement must be an integer")
- engagement_name = data.get('engagement_name', None)
+ engagement_name = data.get("engagement_name", None)
- product_name = data.get('product_name', None)
- product_type_name = data.get('product_type_name', None)
+ product_name = data.get("product_name", None)
+ product_type_name = data.get("product_type_name", None)
- auto_create_context = data.get('auto_create_context', None)
+ auto_create_context = data.get("auto_create_context", None)
- return test_id, test_title, scan_type, engagement_id, engagement_name, product_name, product_type_name, auto_create_context
+ deduplication_on_engagement = data.get(
+ "deduplication_on_engagement", False
+ )
+ do_not_reactivate = data.get("do_not_reactivate", False)
+ return (
+ test_id,
+ test_title,
+ scan_type,
+ engagement_id,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ )
def get_product_id_from_dict(data):
- product_id = data.get('product', None)
+ product_id = data.get("product", None)
if product_id:
if isinstance(product_id, Product):
product_id = product_id.id
elif isinstance(product_id, str) and not product_id.isdigit():
- raise serializers.ValidationError('product must be an integer')
+ raise serializers.ValidationError("product must be an integer")
return product_id
@@ -96,31 +194,46 @@ def __init__(self, *args, **kwargs):
for sev in SEVERITIES:
self.fields[sev.lower()] = StatusStatisticsSerializer()
- self.fields['total'] = StatusStatisticsSerializer()
+ self.fields["total"] = StatusStatisticsSerializer()
class DeltaStatisticsSerializer(serializers.Serializer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for action in IMPORT_ACTIONS:
- self.fields[action[1].lower()] = SeverityStatusStatisticsSerializer()
+ self.fields[
+ action[1].lower()
+ ] = SeverityStatusStatisticsSerializer()
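+ # One nested statistics block per import action; field names are the
+ # lower-cased action labels from IMPORT_ACTIONS.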
class ImportStatisticsSerializer(serializers.Serializer):
- before = SeverityStatusStatisticsSerializer(required=False, help_text="Finding statistics as stored in Defect Dojo before the import")
- delta = DeltaStatisticsSerializer(required=False, help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY hass not disabled.")
- after = SeverityStatusStatisticsSerializer(help_text="Finding statistics as stored in Defect Dojo after the import")
+ before = SeverityStatusStatisticsSerializer(
+ required=False,
+ help_text="Finding statistics as stored in Defect Dojo before the import",
+ )
+ delta = DeltaStatisticsSerializer(
+ required=False,
+ help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY hass not disabled.",
+ )
+ after = SeverityStatusStatisticsSerializer(
+ help_text="Finding statistics as stored in Defect Dojo after the import"
+ )
-@extend_schema_field(serializers.ListField(child=serializers.CharField())) # also takes basic python types
+@extend_schema_field(
+ serializers.ListField(child=serializers.CharField())
+) # also takes basic python types
class TagListSerializerField(serializers.ListField):
child = serializers.CharField()
default_error_messages = {
- 'not_a_list': _(
- 'Expected a list of items but got type "{input_type}".'),
- 'invalid_json': _('Invalid json list. A tag list submitted in string'
- ' form must be valid json.'),
- 'not_a_str': _('All list items must be of string type.')
+ "not_a_list": _(
+ 'Expected a list of items but got type "{input_type}".'
+ ),
+ "invalid_json": _(
+ "Invalid json list. A tag list submitted in string"
+ " form must be valid json."
+ ),
+ "not_a_str": _("All list items must be of string type."),
}
order_by = None
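+ # Accepts either a plain list of strings or a JSON-encoded string such as
+ # '["tag1", "tag2"]' (illustrative values).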
@@ -128,7 +241,7 @@ def __init__(self, **kwargs):
pretty_print = kwargs.pop("pretty_print", True)
style = kwargs.pop("style", {})
- kwargs["style"] = {'base_template': 'textarea.html'}
+ kwargs["style"] = {"base_template": "textarea.html"}
kwargs["style"].update(style)
super(TagListSerializerField, self).__init__(**kwargs)
@@ -142,17 +255,17 @@ def to_internal_value(self, data):
try:
data = json.loads(data)
except ValueError:
- self.fail('invalid_json')
+ self.fail("invalid_json")
- logger.debug('data as json: %s', data)
+ logger.debug("data as json: %s", data)
if not isinstance(data, list):
- self.fail('not_a_list', input_type=type(data).__name__)
+ self.fail("not_a_list", input_type=type(data).__name__)
# data_safe = []
for s in data:
if not isinstance(s, six.string_types):
- self.fail('not_a_str')
+ self.fail("not_a_str")
self.child.run_validation(s)
@@ -171,13 +284,17 @@ def to_internal_value(self, data):
def to_representation(self, value):
if not isinstance(value, list):
# we can't use isinstance because TagRelatedManager is non-existing class
- # it cannot be imported or referenced, so we fallback to string comparison
- if type(value).__name__ == 'TagRelatedManager':
+ # it cannot be imported or referenced, so we fall back to string
+ # comparison
+ if type(value).__name__ == "TagRelatedManager":
value = value.get_tag_list()
elif isinstance(value, str):
value = tagulous.utils.parse_tags(value)
else:
- raise ValueError('unable to convert %s into list of tags' % type(value).__name__)
+ raise ValueError(
+ "unable to convert %s into list of tags"
+ % type(value).__name__
+ )
return value
@@ -193,7 +310,8 @@ def update(self, instance, validated_data):
to_be_tagged, validated_data = self._pop_tags(validated_data)
tag_object = super(TaggitSerializer, self).update(
- instance, validated_data)
+ instance, validated_data
+ )
return self._save_tags(tag_object, to_be_tagged)
@@ -237,7 +355,8 @@ def __getitem__(self, item):
def __str__(self):
if self.pretty_print:
return json.dumps(
- self, sort_keys=True, indent=4, separators=(',', ': '))
+ self, sort_keys=True, indent=4, separators=(",", ": ")
+ )
else:
return json.dumps(self)
@@ -245,12 +364,17 @@ def __str__(self):
class RequestResponseSerializerField(serializers.ListSerializer):
child = DictField(child=serializers.CharField())
default_error_messages = {
- 'not_a_list': _(
- 'Expected a list of items but got type "{input_type}".'),
- 'invalid_json': _('Invalid json list. A tag list submitted in string'
- ' form must be valid json.'),
- 'not_a_dict': _('All list items must be of dict type with keys \'request\' and \'response\''),
- 'not_a_str': _('All values in the dict must be of string type.')
+ "not_a_list": _(
+ 'Expected a list of items but got type "{input_type}".'
+ ),
+ "invalid_json": _(
+ "Invalid json list. A tag list submitted in string"
+ " form must be valid json."
+ ),
+ "not_a_dict": _(
+ "All list items must be of dict type with keys 'request' and 'response'"
+ ),
+ "not_a_str": _("All values in the dict must be of string type."),
}
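+ # Expected input shape (illustrative):
+ # [{"request": "GET / HTTP/1.1 ...", "response": "HTTP/1.1 200 OK ..."}]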
order_by = None
@@ -258,7 +382,7 @@ def __init__(self, **kwargs):
pretty_print = kwargs.pop("pretty_print", True)
style = kwargs.pop("style", {})
- kwargs["style"] = {'base_template': 'textarea.html'}
+ kwargs["style"] = {"base_template": "textarea.html"}
kwargs["style"].update(style)
if "data" in kwargs:
@@ -278,21 +402,21 @@ def to_internal_value(self, data):
try:
data = json.loads(data)
except ValueError:
- self.fail('invalid_json')
+ self.fail("invalid_json")
if not isinstance(data, list):
- self.fail('not_a_list', input_type=type(data).__name__)
+ self.fail("not_a_list", input_type=type(data).__name__)
for s in data:
if not isinstance(s, dict):
- self.fail('not_a_dict', input_type=type(s).__name__)
+ self.fail("not_a_dict", input_type=type(s).__name__)
- request = s.get('request', None)
- response = s.get('response', None)
+ request = s.get("request", None)
+ response = s.get("response", None)
if not isinstance(request, str):
- self.fail('not_a_str', input_type=type(request).__name__)
+ self.fail("not_a_str", input_type=type(request).__name__)
if not isinstance(response, str):
- self.fail('not_a_str', input_type=type(request).__name__)
+ self.fail("not_a_str", input_type=type(request).__name__)
self.child.run_validation(s)
return data
@@ -305,7 +429,13 @@ def to_representation(self, value):
burps = value.all().order_by(*self.order_by)
else:
burps = value.all()
- value = [{'request': burp.get_request(), 'response': burp.get_response()} for burp in burps]
+ value = [
+ {
+ "request": burp.get_request(),
+ "response": burp.get_response(),
+ }
+ for burp in burps
+ ]
return value
@@ -315,18 +445,24 @@ class BurpRawRequestResponseSerializer(serializers.Serializer):
class MetaSerializer(serializers.ModelSerializer):
- product = serializers.PrimaryKeyRelatedField(queryset=Product.objects.all(),
- required=False,
- default=None,
- allow_null=True)
- endpoint = serializers.PrimaryKeyRelatedField(queryset=Endpoint.objects.all(),
- required=False,
- default=None,
- allow_null=True)
- finding = serializers.PrimaryKeyRelatedField(queryset=Finding.objects.all(),
- required=False,
- default=None,
- allow_null=True)
+ product = serializers.PrimaryKeyRelatedField(
+ queryset=Product.objects.all(),
+ required=False,
+ default=None,
+ allow_null=True,
+ )
+ endpoint = serializers.PrimaryKeyRelatedField(
+ queryset=Endpoint.objects.all(),
+ required=False,
+ default=None,
+ allow_null=True,
+ )
+ finding = serializers.PrimaryKeyRelatedField(
+ queryset=Finding.objects.all(),
+ required=False,
+ default=None,
+ allow_null=True,
+ )
def validate(self, data):
DojoMeta(**data).clean()
@@ -334,106 +470,313 @@ def validate(self, data):
class Meta:
model = DojoMeta
- fields = '__all__'
+ fields = "__all__"
class ProductMetaSerializer(serializers.ModelSerializer):
class Meta:
model = DojoMeta
- fields = ('name', 'value')
+ fields = ("name", "value")
class UserSerializer(serializers.ModelSerializer):
last_login = serializers.DateTimeField(read_only=True)
- password = serializers.CharField(write_only=True, style={'input_type': 'password'}, required=False,
- validators=[validate_password])
+ password = serializers.CharField(
+ write_only=True,
+ style={"input_type": "password"},
+ required=False,
+ validators=[validate_password],
+ )
+ configuration_permissions = serializers.PrimaryKeyRelatedField(
+ allow_null=True,
+ queryset=Permission.objects.filter(
+ codename__in=get_configuration_permissions_codenames()
+ ),
+ many=True,
+ required=False,
+ source="user_permissions",
+ )
class Meta:
- model = User
- fields = ('id', 'username', 'first_name', 'last_name', 'email', 'last_login', 'is_active', 'is_staff', 'is_superuser', 'password')
+ model = Dojo_User
+ fields = (
+ "id",
+ "username",
+ "first_name",
+ "last_name",
+ "email",
+ "last_login",
+ "is_active",
+ "is_superuser",
+ "password",
+ "configuration_permissions",
+ )
+
+ def to_representation(self, instance):
+ ret = super().to_representation(instance)
+
+ # This will show only "configuration_permissions" even if the user
+ # also has other permissions
+ all_permissions = set(ret["configuration_permissions"])
+ allowed_configuration_permissions = set(
+ self.fields[
+ "configuration_permissions"
+ ].child_relation.queryset.values_list("id", flat=True)
+ )
+ ret["configuration_permissions"] = list(
+ all_permissions.intersection(allowed_configuration_permissions)
+ )
+
+ return ret
+
+ def update(self, instance, validated_data):
+ new_configuration_permissions = None
+ if (
+ "user_permissions" in validated_data
+ ): # serializer field "configuration_permissions" surfaces here as "user_permissions" via its source
+ new_configuration_permissions = set(
+ validated_data.pop("user_permissions")
+ )
+
+ instance = super().update(instance, validated_data)
+
+ # This will update only Permissions from category
+ # "configuration_permissions". Others will be untouched
+ if new_configuration_permissions:
+ allowed_configuration_permissions = set(
+ self.fields[
+ "configuration_permissions"
+ ].child_relation.queryset.all()
+ )
+ non_configuration_permissions = (
+ set(instance.user_permissions.all())
+ - allowed_configuration_permissions
+ )
+ new_permissions = non_configuration_permissions.union(
+ new_configuration_permissions
+ )
+ instance.user_permissions.set(new_permissions)
+
+ return instance
def create(self, validated_data):
- if 'password' in validated_data:
- password = validated_data.pop('password')
+ if "password" in validated_data:
+ password = validated_data.pop("password")
else:
password = None
- user = User.objects.create(**validated_data)
+
+ new_configuration_permissions = None
+ if (
+ "user_permissions" in validated_data
+ ): # serializer field "configuration_permissions" surfaces here as "user_permissions" via its source
+ new_configuration_permissions = set(
+ validated_data.pop("user_permissions")
+ )
+
+ user = Dojo_User.objects.create(**validated_data)
+
if password:
user.set_password(password)
else:
user.set_unusable_password()
+
+ # This will create only Permissions from category
+ # "configuration_permissions". There are no other Permissions.
+ if new_configuration_permissions:
+ user.user_permissions.set(new_configuration_permissions)
+
user.save()
return user
def validate(self, data):
- if self.context['request'].method in ['PATCH', 'PUT'] and 'password' in data:
- raise ValidationError('Update of password though API is not allowed')
+ if self.instance is not None:
+ instance_is_superuser = self.instance.is_superuser
+ else:
+ instance_is_superuser = False
+ data_is_superuser = data.get("is_superuser", False)
+ if not self.context["request"].user.is_superuser and (
+ instance_is_superuser or data_is_superuser
+ ):
+ raise ValidationError(
+ "Only superusers are allowed to add or edit superusers."
+ )
+
+ if (
+ self.context["request"].method in ["PATCH", "PUT"]
+ and "password" in data
+ ):
+ raise ValidationError(
+ "Update of password though API is not allowed"
+ )
else:
return super().validate(data)
class UserContactInfoSerializer(serializers.ModelSerializer):
-
class Meta:
model = UserContactInfo
- fields = '__all__'
+ fields = "__all__"
class UserStubSerializer(serializers.ModelSerializer):
class Meta:
- model = User
- fields = ('id', 'username', 'first_name', 'last_name')
+ model = Dojo_User
+ fields = ("id", "username", "first_name", "last_name")
class RoleSerializer(serializers.ModelSerializer):
-
class Meta:
model = Role
- fields = '__all__'
+ fields = "__all__"
class DojoGroupSerializer(serializers.ModelSerializer):
+ configuration_permissions = serializers.PrimaryKeyRelatedField(
+ allow_null=True,
+ queryset=Permission.objects.filter(
+ codename__in=get_configuration_permissions_codenames()
+ ),
+ many=True,
+ required=False,
+ source="auth_group.permissions",
+ )
class Meta:
model = Dojo_Group
- exclude = ['auth_group']
+ exclude = ("auth_group",)
+
+ def to_representation(self, instance):
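+ # Backfill: legacy groups may lack a linked Django auth group; create one
+ # on the fly and mirror current members into it before serializing.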
+ if not instance.auth_group:
+ auth_group = Group(name=get_auth_group_name(instance))
+ auth_group.save()
+ instance.auth_group = auth_group
+ members = instance.users.all()
+ for member in members:
+ auth_group.user_set.add(member)
+ instance.save()
+ ret = super().to_representation(instance)
+ # This will show only "configuration_permissions" even if the user
+ # also has other permissions
+ all_permissions = set(ret["configuration_permissions"])
+ allowed_configuration_permissions = set(
+ self.fields[
+ "configuration_permissions"
+ ].child_relation.queryset.values_list("id", flat=True)
+ )
+ ret["configuration_permissions"] = list(
+ all_permissions.intersection(allowed_configuration_permissions)
+ )
+ return ret
+
+ def create(self, validated_data):
+ new_configuration_permissions = None
+ if (
+ "auth_group" in validated_data
+ and "permissions" in validated_data["auth_group"]
+ ): # serializer field "configuration_permissions" surfaces here as auth_group["permissions"] via its source
+ new_configuration_permissions = set(
+ validated_data.pop("auth_group")["permissions"]
+ )
+
+ instance = super().create(validated_data)
+
+ # This will update only Permissions from category
+ # "configuration_permissions". There are no other Permissions.
+ if new_configuration_permissions:
+ instance.auth_group.permissions.set(new_configuration_permissions)
+
+ return instance
+
+ def update(self, instance, validated_data):
+ new_configuration_permissions = None
+ if (
+ "auth_group" in validated_data
+ and "permissions" in validated_data["auth_group"]
+ ): # serializer field "configuration_permissions" surfaces here as auth_group["permissions"] via its source
+ new_configuration_permissions = set(
+ validated_data.pop("auth_group")["permissions"]
+ )
+
+ instance = super().update(instance, validated_data)
+
+ # This will update only Permissions from category
+ # "configuration_permissions". Others will be untouched
+ if new_configuration_permissions:
+ allowed_configuration_permissions = set(
+ self.fields[
+ "configuration_permissions"
+ ].child_relation.queryset.all()
+ )
+ non_configuration_permissions = (
+ set(instance.auth_group.permissions.all())
+ - allowed_configuration_permissions
+ )
+ new_permissions = non_configuration_permissions.union(
+ new_configuration_permissions
+ )
+ instance.auth_group.permissions.set(new_permissions)
+
+ return instance
-class DojoGroupMemberSerializer(serializers.ModelSerializer):
+class DojoGroupMemberSerializer(serializers.ModelSerializer):
class Meta:
model = Dojo_Group_Member
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.instance is not None and \
- data.get('group') != self.instance.group and \
- not user_has_permission(self.context['request'].user, data.get('group'), Permissions.Group_Manage_Members):
- raise PermissionDenied('You are not permitted to add a user to this group')
-
- if self.instance is None or \
- data.get('group') != self.instance.group or \
- data.get('user') != self.instance.user:
- members = Dojo_Group_Member.objects.filter(group=data.get('group'), user=data.get('user'))
+ if (
+ self.instance is not None
+ and data.get("group") != self.instance.group
+ and not user_has_permission(
+ self.context["request"].user,
+ data.get("group"),
+ Permissions.Group_Manage_Members,
+ )
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a user to this group"
+ )
+
+ if (
+ self.instance is None
+ or data.get("group") != self.instance.group
+ or data.get("user") != self.instance.user
+ ):
+ members = Dojo_Group_Member.objects.filter(
+ group=data.get("group"), user=data.get("user")
+ )
if members.count() > 0:
- raise ValidationError('Dojo_Group_Member already exists')
-
- if self.instance is not None and not data.get('role').is_owner:
- owners = Dojo_Group_Member.objects.filter(group=data.get('group'), role__is_owner=True).exclude(id=self.instance.id).count()
+ raise ValidationError("Dojo_Group_Member already exists")
+
+ if self.instance is not None and not data.get("role").is_owner:
+ owners = (
+ Dojo_Group_Member.objects.filter(
+ group=data.get("group"), role__is_owner=True
+ )
+ .exclude(id=self.instance.id)
+ .count()
+ )
if owners < 1:
- raise ValidationError('There must be at least one owner')
+ raise ValidationError("There must be at least one owner")
- if data.get('role').is_owner and not user_has_permission(self.context['request'].user, data.get('group'), Permissions.Group_Add_Owner):
- raise PermissionDenied('You are not permitted to add a user as Owner to this group')
+ if data.get("role").is_owner and not user_has_permission(
+ self.context["request"].user,
+ data.get("group"),
+ Permissions.Group_Add_Owner,
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a user as Owner to this group"
+ )
return data
class GlobalRoleSerializer(serializers.ModelSerializer):
-
class Meta:
model = Global_Role
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
user = None
@@ -443,51 +786,57 @@ def validate(self, data):
user = self.instance.user
group = self.instance.group
- if 'user' in data:
- user = data.get('user')
- if 'group' in data:
- group = data.get('group')
+ if "user" in data:
+ user = data.get("user")
+ if "group" in data:
+ group = data.get("group")
if user is None and group is None:
raise ValidationError("Global_Role must have either user or group")
if user is not None and group is not None:
- raise ValidationError("Global_Role cannot have both user and group")
+ raise ValidationError(
+ "Global_Role cannot have both user and group"
+ )
return data
class AddUserSerializer(serializers.ModelSerializer):
-
class Meta:
model = User
- fields = ('id', 'username')
+ fields = ("id", "username")
+
+
+class NoteTypeSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Note_Type
+ fields = "__all__"
class NoteHistorySerializer(serializers.ModelSerializer):
current_editor = UserStubSerializer(read_only=True)
+ note_type = NoteTypeSerializer(read_only=True, many=False)
class Meta:
model = NoteHistory
- fields = '__all__'
+ fields = "__all__"
class NoteSerializer(serializers.ModelSerializer):
- author = UserStubSerializer(
- many=False, read_only=True)
- editor = UserStubSerializer(
- read_only=True, many=False, allow_null=True)
-
+ author = UserStubSerializer(many=False, read_only=True)
+ editor = UserStubSerializer(read_only=True, many=False, allow_null=True)
history = NoteHistorySerializer(read_only=True, many=True)
+ note_type = NoteTypeSerializer(read_only=True, many=False)
def update(self, instance, validated_data):
- instance.entry = validated_data['entry']
+ instance.entry = validated_data.get("entry")
instance.edited = True
- instance.editor = self.context['request'].user
+ instance.editor = self.context["request"].user
instance.edit_time = timezone.now()
history = NoteHistory(
data=instance.entry,
time=instance.edit_time,
- current_editor=instance.editor
+ current_editor=instance.editor,
)
history.save()
instance.history.add(history)
@@ -496,133 +845,216 @@ def update(self, instance, validated_data):
class Meta:
model = Notes
- fields = '__all__'
+ fields = "__all__"
-class NoteTypeSerializer(serializers.ModelSerializer):
+class FileSerializer(serializers.ModelSerializer):
+ file = serializers.FileField(required=True)
+
class Meta:
- model = Note_Type
- fields = '__all__'
+ model = FileUpload
+ fields = "__all__"
-class FileSerializer(serializers.ModelSerializer):
+class RawFileSerializer(serializers.ModelSerializer):
file = serializers.FileField(required=True)
class Meta:
model = FileUpload
- fields = '__all__'
+ fields = ["file"]
-class ProductMemberSerializer(serializers.ModelSerializer):
+class RiskAcceptanceProofSerializer(serializers.ModelSerializer):
+ path = serializers.FileField(required=True)
+ class Meta:
+ model = Risk_Acceptance
+ fields = ["path"]
+
+
+class ProductMemberSerializer(serializers.ModelSerializer):
class Meta:
model = Product_Member
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.instance is not None and \
- data.get('product') != self.instance.product and \
- not user_has_permission(self.context['request'].user, data.get('product'), Permissions.Product_Manage_Members):
- raise PermissionDenied('You are not permitted to add a member to this product')
-
- if self.instance is None or \
- data.get('product') != self.instance.product or \
- data.get('user') != self.instance.user:
- members = Product_Member.objects.filter(product=data.get('product'), user=data.get('user'))
+ if (
+ self.instance is not None
+ and data.get("product") != self.instance.product
+ and not user_has_permission(
+ self.context["request"].user,
+ data.get("product"),
+ Permissions.Product_Manage_Members,
+ )
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a member to this product"
+ )
+
+ if (
+ self.instance is None
+ or data.get("product") != self.instance.product
+ or data.get("user") != self.instance.user
+ ):
+ members = Product_Member.objects.filter(
+ product=data.get("product"), user=data.get("user")
+ )
if members.count() > 0:
- raise ValidationError('Product_Member already exists')
+ raise ValidationError("Product_Member already exists")
- if data.get('role').is_owner and not user_has_permission(self.context['request'].user, data.get('product'), Permissions.Product_Member_Add_Owner):
- raise PermissionDenied('You are not permitted to add a member as Owner to this product')
+ if data.get("role").is_owner and not user_has_permission(
+ self.context["request"].user,
+ data.get("product"),
+ Permissions.Product_Member_Add_Owner,
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a member as Owner to this product"
+ )
return data
class ProductGroupSerializer(serializers.ModelSerializer):
-
class Meta:
model = Product_Group
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.instance is not None and \
- data.get('product') != self.instance.product and \
- not user_has_permission(self.context['request'].user, data.get('product'), Permissions.Product_Group_Add):
- raise PermissionDenied('You are not permitted to add a group to this product')
-
- if self.instance is None or \
- data.get('product') != self.instance.product or \
- data.get('group') != self.instance.group:
- members = Product_Group.objects.filter(product=data.get('product'), group=data.get('group'))
+ if (
+ self.instance is not None
+ and data.get("product") != self.instance.product
+ and not user_has_permission(
+ self.context["request"].user,
+ data.get("product"),
+ Permissions.Product_Group_Add,
+ )
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a group to this product"
+ )
+
+ if (
+ self.instance is None
+ or data.get("product") != self.instance.product
+ or data.get("group") != self.instance.group
+ ):
+ members = Product_Group.objects.filter(
+ product=data.get("product"), group=data.get("group")
+ )
if members.count() > 0:
- raise ValidationError('Product_Group already exists')
+ raise ValidationError("Product_Group already exists")
- if data.get('role').is_owner and not user_has_permission(self.context['request'].user, data.get('product'), Permissions.Product_Group_Add_Owner):
- raise PermissionDenied('You are not permitted to add a group as Owner to this product')
+ if data.get("role").is_owner and not user_has_permission(
+ self.context["request"].user,
+ data.get("product"),
+ Permissions.Product_Group_Add_Owner,
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a group as Owner to this product"
+ )
return data
class ProductTypeMemberSerializer(serializers.ModelSerializer):
-
class Meta:
model = Product_Type_Member
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.instance is not None and \
- data.get('product_type') != self.instance.product_type and \
- not user_has_permission(self.context['request'].user, data.get('product_type'), Permissions.Product_Type_Manage_Members):
- raise PermissionDenied('You are not permitted to add a member to this product type')
-
- if self.instance is None or \
- data.get('product_type') != self.instance.product_type or \
- data.get('user') != self.instance.user:
- members = Product_Type_Member.objects.filter(product_type=data.get('product_type'), user=data.get('user'))
+ if (
+ self.instance is not None
+ and data.get("product_type") != self.instance.product_type
+ and not user_has_permission(
+ self.context["request"].user,
+ data.get("product_type"),
+ Permissions.Product_Type_Manage_Members,
+ )
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a member to this product type"
+ )
+
+ if (
+ self.instance is None
+ or data.get("product_type") != self.instance.product_type
+ or data.get("user") != self.instance.user
+ ):
+ members = Product_Type_Member.objects.filter(
+ product_type=data.get("product_type"), user=data.get("user")
+ )
if members.count() > 0:
- raise ValidationError('Product_Type_Member already exists')
-
- if self.instance is not None and not data.get('role').is_owner:
- owners = Product_Type_Member.objects.filter(product_type=data.get('product_type'), role__is_owner=True).exclude(id=self.instance.id).count()
+ raise ValidationError("Product_Type_Member already exists")
+
+ if self.instance is not None and not data.get("role").is_owner:
+ owners = (
+ Product_Type_Member.objects.filter(
+ product_type=data.get("product_type"), role__is_owner=True
+ )
+ .exclude(id=self.instance.id)
+ .count()
+ )
if owners < 1:
- raise ValidationError('There must be at least one owner')
+ raise ValidationError("There must be at least one owner")
- if data.get('role').is_owner and not user_has_permission(self.context['request'].user, data.get('product_type'), Permissions.Product_Type_Member_Add_Owner):
- raise PermissionDenied('You are not permitted to add a member as Owner to this product type')
+ if data.get("role").is_owner and not user_has_permission(
+ self.context["request"].user,
+ data.get("product_type"),
+ Permissions.Product_Type_Member_Add_Owner,
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a member as Owner to this product type"
+ )
return data
class ProductTypeGroupSerializer(serializers.ModelSerializer):
-
class Meta:
model = Product_Type_Group
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.instance is not None and \
- data.get('product_type') != self.instance.product_type and \
- not user_has_permission(self.context['request'].user, data.get('product_type'), Permissions.Product_Type_Group_Add):
- raise PermissionDenied('You are not permitted to add a group to this product type')
-
- if self.instance is None or \
- data.get('product_type') != self.instance.product_type or \
- data.get('group') != self.instance.group:
- members = Product_Type_Group.objects.filter(product_type=data.get('product_type'), group=data.get('group'))
+ if (
+ self.instance is not None
+ and data.get("product_type") != self.instance.product_type
+ and not user_has_permission(
+ self.context["request"].user,
+ data.get("product_type"),
+ Permissions.Product_Type_Group_Add,
+ )
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a group to this product type"
+ )
+
+ if (
+ self.instance is None
+ or data.get("product_type") != self.instance.product_type
+ or data.get("group") != self.instance.group
+ ):
+ members = Product_Type_Group.objects.filter(
+ product_type=data.get("product_type"), group=data.get("group")
+ )
if members.count() > 0:
- raise ValidationError('Product_Type_Group already exists')
+ raise ValidationError("Product_Type_Group already exists")
- if data.get('role').is_owner and not user_has_permission(self.context['request'].user, data.get('product_type'), Permissions.Product_Type_Group_Add_Owner):
- raise PermissionDenied('You are not permitted to add a group as Owner to this product type')
+ if data.get("role").is_owner and not user_has_permission(
+ self.context["request"].user,
+ data.get("product_type"),
+ Permissions.Product_Type_Group_Add_Owner,
+ ):
+ raise PermissionDenied(
+ "You are not permitted to add a group as Owner to this product type"
+ )
return data
class ProductTypeSerializer(serializers.ModelSerializer):
-
class Meta:
model = Product_Type
- fields = '__all__'
+ fields = "__all__"
class EngagementSerializer(TaggitSerializer, serializers.ModelSerializer):
@@ -630,128 +1062,186 @@ class EngagementSerializer(TaggitSerializer, serializers.ModelSerializer):
class Meta:
model = Engagement
- fields = '__all__'
+ exclude = ("inherited_tags",)
def validate(self, data):
- if self.context['request'].method == 'POST':
- if data['target_start'] > data['target_end']:
+ if self.context["request"].method == "POST":
+ if data.get("target_start") > data.get("target_end"):
raise serializers.ValidationError(
- 'Your target start date exceeds your target end date')
+ "Your target start date exceeds your target end date"
+ )
return data
def build_relational_field(self, field_name, relation_info):
- if field_name == 'notes':
- return NoteSerializer, {'many': True, 'read_only': True}
- if field_name == 'files':
- return FileSerializer, {'many': True, 'read_only': True}
+ if field_name == "notes":
+ return NoteSerializer, {"many": True, "read_only": True}
+ if field_name == "files":
+ return FileSerializer, {"many": True, "read_only": True}
return super().build_relational_field(field_name, relation_info)
class EngagementToNotesSerializer(serializers.Serializer):
- engagement_id = serializers.PrimaryKeyRelatedField(queryset=Engagement.objects.all(), many=False, allow_null=True)
+ engagement_id = serializers.PrimaryKeyRelatedField(
+ queryset=Engagement.objects.all(), many=False, allow_null=True
+ )
notes = NoteSerializer(many=True)
class EngagementToFilesSerializer(serializers.Serializer):
- engagement_id = serializers.PrimaryKeyRelatedField(queryset=Engagement.objects.all(), many=False, allow_null=True)
+ engagement_id = serializers.PrimaryKeyRelatedField(
+ queryset=Engagement.objects.all(), many=False, allow_null=True
+ )
files = FileSerializer(many=True)
+ def to_representation(self, data):
+ engagement = data.get("engagement_id")
+ files = data.get("files")
+ new_files = []
+ for file in files:
+ new_files.append(
+ {
+ "id": file.id,
+ "file": "{site_url}/{file_access_url}".format(
+ site_url=settings.SITE_URL,
+ file_access_url=file.get_accessible_url(
+ engagement, engagement.id
+ ),
+ ),
+ "title": file.title,
+ }
+ )
+ new_data = {"engagement_id": engagement.id, "files": new_files}
+ return new_data
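+    # Illustrative shape of the output (hypothetical values), assuming
+    # settings.SITE_URL is "https://defectdojo.example.com"; the exact file
+    # access path comes from file.get_accessible_url():
+    #   {"engagement_id": 7,
+    #    "files": [{"id": 3,
+    #               "file": "https://defectdojo.example.com/<file_access_url>",
+    #               "title": "proof-of-concept"}]}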
+
+
+class EngagementCheckListSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Check_List
+ fields = "__all__"
+
class AppAnalysisSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField(required=False)
class Meta:
model = App_Analysis
- fields = '__all__'
+ fields = "__all__"
class ToolTypeSerializer(serializers.ModelSerializer):
class Meta:
model = Tool_Type
- fields = '__all__'
+ fields = "__all__"
+
+ def validate(self, data):
+ if self.context["request"].method == "POST":
+ name = data.get("name")
+            # Make sure this will not create a duplicate tool type
+            if Tool_Type.objects.filter(name=name).count() > 0:
+                raise serializers.ValidationError(
+                    "A Tool Type with this name already exists"
+                )
+ return data
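+    # Effect (hypothetical request): POSTing {"name": "SonarQube"} twice
+    # yields HTTP 400 on the second call; PUT/PATCH requests skip the
+    # duplicate-name check entirely.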
class RegulationSerializer(serializers.ModelSerializer):
class Meta:
model = Regulation
- fields = '__all__'
+ fields = "__all__"
class ToolConfigurationSerializer(serializers.ModelSerializer):
- configuration_url = serializers.CharField(source='url')
-
class Meta:
model = Tool_Configuration
- fields = '__all__'
+ fields = "__all__"
extra_kwargs = {
- 'password': {'write_only': True},
- 'ssh': {'write_only': True},
- 'api_key': {'write_only': True},
+ "password": {"write_only": True},
+ "ssh": {"write_only": True},
+ "api_key": {"write_only": True},
}
class ToolProductSettingsSerializer(serializers.ModelSerializer):
- setting_url = serializers.CharField(source='url')
+ setting_url = serializers.CharField(source="url")
+ product = serializers.PrimaryKeyRelatedField(
+ queryset=Product.objects.all(), required=True
+ )
class Meta:
model = Tool_Product_Settings
- fields = '__all__'
+ fields = "__all__"
class EndpointStatusSerializer(serializers.ModelSerializer):
class Meta:
model = Endpoint_Status
- fields = '__all__'
+ fields = "__all__"
def create(self, validated_data):
- endpoint = validated_data['endpoint']
- finding = validated_data['finding']
- status = Endpoint_Status.objects.create(
- finding=finding,
- endpoint=endpoint
- )
- endpoint.endpoint_status.add(status)
- finding.endpoint_status.add(status)
- status.mitigated = validated_data.get('mitigated', False)
- status.false_positive = validated_data.get('false_positive', False)
- status.out_of_scope = validated_data.get('out_of_scope', False)
- status.risk_accepted = validated_data.get('risk_accepted', False)
- status.date = validated_data.get('date', timezone.now())
+ endpoint = validated_data.get("endpoint")
+ finding = validated_data.get("finding")
+ try:
+ status = Endpoint_Status.objects.create(
+ finding=finding, endpoint=endpoint
+ )
+ except IntegrityError as ie:
+ if "endpoint-finding relation" in str(ie):
+ raise serializers.ValidationError(
+ "This endpoint-finding relation already exists"
+ )
+ else:
+ raise
+ status.mitigated = validated_data.get("mitigated", False)
+ status.false_positive = validated_data.get("false_positive", False)
+ status.out_of_scope = validated_data.get("out_of_scope", False)
+ status.risk_accepted = validated_data.get("risk_accepted", False)
+ status.date = validated_data.get("date", get_current_date())
status.save()
return status
+ def update(self, instance, validated_data):
+ try:
+ return super().update(instance, validated_data)
+ except IntegrityError as ie:
+ if "endpoint-finding relation" in str(ie):
+ raise serializers.ValidationError(
+ "This endpoint-finding relation already exists"
+ )
+ else:
+ raise
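+    # Both create() and update() translate the database-level unique
+    # constraint on the (endpoint, finding) pair into a 400 ValidationError,
+    # so a duplicate relation no longer surfaces as an unhandled
+    # IntegrityError (HTTP 500). The match on "endpoint-finding relation"
+    # assumes that text appears in the constraint's error message.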
+
class EndpointSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField(required=False)
class Meta:
model = Endpoint
- fields = '__all__'
+ exclude = ("inherited_tags",)
def validate(self, data):
# print('EndpointSerialize.validate')
- if not self.context['request'].method == 'PATCH':
- if 'product' not in data:
- raise serializers.ValidationError('Product is required')
- protocol = data.get('protocol')
- userinfo = data.get('userinfo')
- host = data.get('host')
- port = data.get('port')
- path = data.get('path')
- query = data.get('query')
- fragment = data.get('fragment')
- product = data.get('product')
+ if not self.context["request"].method == "PATCH":
+ if "product" not in data:
+ raise serializers.ValidationError("Product is required")
+ protocol = data.get("protocol")
+ userinfo = data.get("userinfo")
+ host = data.get("host")
+ port = data.get("port")
+ path = data.get("path")
+ query = data.get("query")
+ fragment = data.get("fragment")
+ product = data.get("product")
else:
- protocol = data.get('protocol', self.instance.protocol)
- userinfo = data.get('userinfo', self.instance.userinfo)
- host = data.get('host', self.instance.host)
- port = data.get('port', self.instance.port)
- path = data.get('path', self.instance.path)
- query = data.get('query', self.instance.query)
- fragment = data.get('fragment', self.instance.fragment)
- if 'product' in data and data['product'] != self.instance.product:
- raise serializers.ValidationError('Change of product is not possible')
+ protocol = data.get("protocol", self.instance.protocol)
+ userinfo = data.get("userinfo", self.instance.userinfo)
+ host = data.get("host", self.instance.host)
+ port = data.get("port", self.instance.port)
+ path = data.get("path", self.instance.path)
+ query = data.get("query", self.instance.query)
+ fragment = data.get("fragment", self.instance.fragment)
+ if "product" in data and data["product"] != self.instance.product:
+ raise serializers.ValidationError(
+ "Change of product is not possible"
+ )
product = self.instance.product
endpoint_ins = Endpoint(
@@ -762,7 +1252,7 @@ def validate(self, data):
path=path,
query=query,
fragment=fragment,
- product=product
+ product=product,
)
endpoint_ins.clean() # Run standard validation and clean process; can raise errors
@@ -774,57 +1264,77 @@ def validate(self, data):
path=endpoint_ins.path,
query=endpoint_ins.query,
fragment=endpoint_ins.fragment,
- product=endpoint_ins.product
+ product=endpoint_ins.product,
)
- if ((self.context['request'].method in ["PUT", "PATCH"] and
- ((endpoint.count() > 1) or
- (endpoint.count() == 1 and
- endpoint.first().pk != self.instance.pk))) or
- (self.context['request'].method in ["POST"] and endpoint.count() > 0)):
+ if (
+ self.context["request"].method in ["PUT", "PATCH"]
+ and (
+ (endpoint.count() > 1)
+ or (
+ endpoint.count() == 1
+ and endpoint.first().pk != self.instance.pk
+ )
+ )
+ ) or (
+ self.context["request"].method in ["POST"] and endpoint.count() > 0
+ ):
raise serializers.ValidationError(
- 'It appears as though an endpoint with this data already '
- 'exists for this product.',
- code='invalid')
+ "It appears as though an endpoint with this data already "
+ "exists for this product.",
+ code="invalid",
+ )
# use clean data
- data['protocol'] = endpoint_ins.protocol
- data['userinfo'] = endpoint_ins.userinfo
- data['host'] = endpoint_ins.host
- data['port'] = endpoint_ins.port
- data['path'] = endpoint_ins.path
- data['query'] = endpoint_ins.query
- data['fragment'] = endpoint_ins.fragment
- data['product'] = endpoint_ins.product
+ data["protocol"] = endpoint_ins.protocol
+ data["userinfo"] = endpoint_ins.userinfo
+ data["host"] = endpoint_ins.host
+ data["port"] = endpoint_ins.port
+ data["path"] = endpoint_ins.path
+ data["query"] = endpoint_ins.query
+ data["fragment"] = endpoint_ins.fragment
+ data["product"] = endpoint_ins.product
return data
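+    # Duplicate rule above, summarized: on POST, any existing endpoint with
+    # the same cleaned parts is a conflict; on PUT/PATCH, a match only
+    # conflicts when it is a different record (pk != self.instance.pk), so
+    # re-saving an endpoint onto itself remains valid.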
+class EndpointParamsSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Endpoint_Params
+ fields = "__all__"
+
+
class JIRAIssueSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField(read_only=True)
class Meta:
model = JIRA_Issue
- fields = '__all__'
+ fields = "__all__"
def get_url(self, obj) -> str:
return jira_helper.get_jira_issue_url(obj)
def validate(self, data):
- if self.context['request'].method == 'PATCH':
- engagement = data.get('engagement', self.instance.engagement)
- finding = data.get('finding', self.instance.finding)
- finding_group = data.get('finding_group', self.instance.finding_group)
+ if self.context["request"].method == "PATCH":
+ engagement = data.get("engagement", self.instance.engagement)
+ finding = data.get("finding", self.instance.finding)
+ finding_group = data.get(
+ "finding_group", self.instance.finding_group
+ )
else:
- engagement = data.get('engagement', None)
- finding = data.get('finding', None)
- finding_group = data.get('finding_group', None)
-
- if ((engagement and not finding and not finding_group) or
- (finding and not engagement and not finding_group) or
- (finding_group and not engagement and not finding)):
+ engagement = data.get("engagement", None)
+ finding = data.get("finding", None)
+ finding_group = data.get("finding_group", None)
+
+ if (
+ (engagement and not finding and not finding_group)
+ or (finding and not engagement and not finding_group)
+ or (finding_group and not engagement and not finding)
+ ):
pass
else:
- raise serializers.ValidationError('Either engagement or finding or finding_group has to be set.')
+ raise serializers.ValidationError(
+ "Either engagement or finding or finding_group has to be set."
+ )
return data
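+    # The check above requires exactly one of engagement, finding or
+    # finding_group. Hypothetical payloads:
+    #   {"finding": 12}                   -> accepted
+    #   {"engagement": 3, "finding": 12}  -> 400
+    #   {}                                -> 400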
@@ -832,27 +1342,29 @@ def validate(self, data):
class JIRAInstanceSerializer(serializers.ModelSerializer):
class Meta:
model = JIRA_Instance
- fields = '__all__'
+ fields = "__all__"
extra_kwargs = {
- 'password': {'write_only': True},
+ "password": {"write_only": True},
}
class JIRAProjectSerializer(serializers.ModelSerializer):
class Meta:
model = JIRA_Project
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
- if self.context['request'].method == 'PATCH':
- engagement = data.get('engagement', self.instance.engagement)
- product = data.get('product', self.instance.product)
+ if self.context["request"].method == "PATCH":
+ engagement = data.get("engagement", self.instance.engagement)
+ product = data.get("product", self.instance.product)
else:
- engagement = data.get('engagement', None)
- product = data.get('product', None)
+ engagement = data.get("engagement", None)
+ product = data.get("product", None)
- if ((engagement and product) or (not engagement and not product)):
- raise serializers.ValidationError('Either engagement or product has to be set.')
+ if (engagement and product) or (not engagement and not product):
+ raise serializers.ValidationError(
+ "Either engagement or product has to be set."
+ )
return data
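+    # Equivalent XOR: bool(engagement) != bool(product) must hold, i.e. a
+    # JIRA_Project is linked to either an engagement or a product, never
+    # both and never neither.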
@@ -860,26 +1372,25 @@ def validate(self, data):
class SonarqubeIssueSerializer(serializers.ModelSerializer):
class Meta:
model = Sonarqube_Issue
- fields = '__all__'
+ fields = "__all__"
class SonarqubeIssueTransitionSerializer(serializers.ModelSerializer):
class Meta:
model = Sonarqube_Issue_Transition
- fields = '__all__'
+ fields = "__all__"
class ProductAPIScanConfigurationSerializer(serializers.ModelSerializer):
class Meta:
model = Product_API_Scan_Configuration
- fields = '__all__'
+ fields = "__all__"
class DevelopmentEnvironmentSerializer(serializers.ModelSerializer):
-
class Meta:
model = Development_Environment
- fields = '__all__'
+ fields = "__all__"
class FindingGroupSerializer(serializers.ModelSerializer):
@@ -887,39 +1398,43 @@ class FindingGroupSerializer(serializers.ModelSerializer):
class Meta:
model = Finding_Group
- fields = ('id', 'name', 'test', 'jira_issue')
+ fields = ("id", "name", "test", "jira_issue")
class TestSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField(required=False)
test_type_name = serializers.ReadOnlyField()
- finding_groups = FindingGroupSerializer(source='finding_group_set', many=True, read_only=True)
+ finding_groups = FindingGroupSerializer(
+ source="finding_group_set", many=True, read_only=True
+ )
class Meta:
model = Test
- fields = '__all__'
+ exclude = ("inherited_tags",)
def build_relational_field(self, field_name, relation_info):
- if field_name == 'notes':
- return NoteSerializer, {'many': True, 'read_only': True}
- if field_name == 'files':
- return FileSerializer, {'many': True, 'read_only': True}
+ if field_name == "notes":
+ return NoteSerializer, {"many": True, "read_only": True}
+ if field_name == "files":
+ return FileSerializer, {"many": True, "read_only": True}
return super().build_relational_field(field_name, relation_info)
class TestCreateSerializer(TaggitSerializer, serializers.ModelSerializer):
engagement = serializers.PrimaryKeyRelatedField(
- queryset=Engagement.objects.all())
+ queryset=Engagement.objects.all()
+ )
notes = serializers.PrimaryKeyRelatedField(
allow_null=True,
queryset=Notes.objects.all(),
many=True,
- required=False)
+ required=False,
+ )
tags = TagListSerializerField(required=False)
class Meta:
model = Test
- fields = '__all__'
+ exclude = ("inherited_tags",)
class TestTypeSerializer(TaggitSerializer, serializers.ModelSerializer):
@@ -927,44 +1442,118 @@ class TestTypeSerializer(TaggitSerializer, serializers.ModelSerializer):
class Meta:
model = Test_Type
- fields = '__all__'
+ fields = "__all__"
class TestToNotesSerializer(serializers.Serializer):
- test_id = serializers.PrimaryKeyRelatedField(queryset=Test.objects.all(), many=False, allow_null=True)
+ test_id = serializers.PrimaryKeyRelatedField(
+ queryset=Test.objects.all(), many=False, allow_null=True
+ )
notes = NoteSerializer(many=True)
class TestToFilesSerializer(serializers.Serializer):
- test_id = serializers.PrimaryKeyRelatedField(queryset=Test.objects.all(), many=False, allow_null=True)
+ test_id = serializers.PrimaryKeyRelatedField(
+ queryset=Test.objects.all(), many=False, allow_null=True
+ )
files = FileSerializer(many=True)
+ def to_representation(self, data):
+ test = data.get("test_id")
+ files = data.get("files")
+ new_files = []
+ for file in files:
+ new_files.append(
+ {
+ "id": file.id,
+ "file": "{site_url}/{file_access_url}".format(
+ site_url=settings.SITE_URL,
+ file_access_url=file.get_accessible_url(test, test.id),
+ ),
+ "title": file.title,
+ }
+ )
+ new_data = {"test_id": test.id, "files": new_files}
+ return new_data
+
class TestImportFindingActionSerializer(serializers.ModelSerializer):
class Meta:
model = Test_Import_Finding_Action
- fields = '__all__'
+ fields = "__all__"
class TestImportSerializer(serializers.ModelSerializer):
# findings = TestImportFindingActionSerializer(source='test_import_finding_action', many=True, read_only=True)
- test_import_finding_action_set = TestImportFindingActionSerializer(many=True, read_only=True)
+ test_import_finding_action_set = TestImportFindingActionSerializer(
+ many=True, read_only=True
+ )
class Meta:
model = Test_Import
- fields = '__all__'
+ fields = "__all__"
class RiskAcceptanceSerializer(serializers.ModelSerializer):
+ recommendation = serializers.SerializerMethodField()
+ decision = serializers.SerializerMethodField()
+ path = serializers.SerializerMethodField()
+
+ @extend_schema_field(serializers.CharField())
+ @swagger_serializer_method(serializers.CharField())
+ def get_recommendation(self, obj):
+ return Risk_Acceptance.TREATMENT_TRANSLATIONS.get(obj.recommendation)
+
+ @extend_schema_field(serializers.CharField())
+ @swagger_serializer_method(serializers.CharField())
+ def get_decision(self, obj):
+ return Risk_Acceptance.TREATMENT_TRANSLATIONS.get(obj.decision)
+
+ @extend_schema_field(serializers.CharField())
+ @swagger_serializer_method(serializers.CharField())
+ def get_path(self, obj):
+ engagement = Engagement.objects.filter(
+ risk_acceptance__id__in=[obj.id]
+ ).first()
+ path = "No proof has been supplied"
+ if engagement and obj.filename() is not None:
+ path = reverse(
+ "download_risk_acceptance", args=(engagement.id, obj.id)
+ )
+ request = self.context.get("request")
+ if request:
+ path = request.build_absolute_uri(path)
+ return path
+
+ @extend_schema_field(serializers.IntegerField())
+ @swagger_serializer_method(serializers.IntegerField())
+ def get_engagement(self, obj):
+ engagement = Engagement.objects.filter(
+ risk_acceptance__id__in=[obj.id]
+ ).first()
+ return EngagementSerializer(read_only=True).to_representation(
+ engagement
+ )
+
+ def validate(self, data):
+ if self.context["request"].method == "POST":
+            findings = data["accepted_findings"]
+            for finding in findings:
+                if not user_has_permission(
+                    self.context["request"].user,
+                    finding,
+                    Permissions.Finding_View,
+                ):
+ raise PermissionDenied(
+ "You are not permitted to add one or more selected findings to this risk acceptance"
+ )
+ return data
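+    # The permission check runs per selected finding, so a POST fails as
+    # soon as any one accepted finding is outside the caller's Finding_View
+    # scope; PUT/PATCH payloads are not re-checked here.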
+
class Meta:
model = Risk_Acceptance
- fields = '__all__'
+ fields = "__all__"
class FindingMetaSerializer(serializers.ModelSerializer):
class Meta:
model = DojoMeta
- fields = ('name', 'value')
+ fields = ("name", "value")
class FindingProdTypeSerializer(serializers.ModelSerializer):
@@ -986,7 +1575,21 @@ class FindingEngagementSerializer(serializers.ModelSerializer):
class Meta:
model = Engagement
- fields = ["id", "name", "product", "branch_tag", "build_id", "commit_hash", "version"]
+ fields = [
+ "id",
+ "name",
+ "description",
+ "product",
+ "target_start",
+ "target_end",
+ "branch_tag",
+ "engagement_type",
+ "build_id",
+ "commit_hash",
+ "version",
+ "created",
+ "updated",
+ ]
class FindingEnvironmentSerializer(serializers.ModelSerializer):
@@ -1008,7 +1611,17 @@ class FindingTestSerializer(serializers.ModelSerializer):
class Meta:
model = Test
- fields = ["id", "title", "test_type", "engagement", "environment", "branch_tag", "build_id", "commit_hash", "version"]
+ fields = [
+ "id",
+ "title",
+ "test_type",
+ "engagement",
+ "environment",
+ "branch_tag",
+ "build_id",
+ "commit_hash",
+ "version",
+ ]
class FindingRelatedFieldsSerializer(serializers.Serializer):
@@ -1018,7 +1631,9 @@ class FindingRelatedFieldsSerializer(serializers.Serializer):
@extend_schema_field(FindingTestSerializer)
@swagger_serializer_method(FindingTestSerializer)
def get_test(self, obj):
- return FindingTestSerializer(read_only=True).to_representation(obj.test)
+ return FindingTestSerializer(read_only=True).to_representation(
+ obj.test
+ )
@extend_schema_field(JIRAIssueSerializer)
@swagger_serializer_method(JIRAIssueSerializer)
@@ -1029,10 +1644,18 @@ def get_jira(self, obj):
return JIRAIssueSerializer(read_only=True).to_representation(issue)
+class VulnerabilityIdSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Vulnerability_Id
+ fields = ["vulnerability_id"]
+
+
class FindingSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField(required=False)
request_response = serializers.SerializerMethodField()
- accepted_risks = RiskAcceptanceSerializer(many=True, read_only=True, source='risk_acceptance_set')
+ accepted_risks = RiskAcceptanceSerializer(
+ many=True, read_only=True, source="risk_acceptance_set"
+ )
push_to_jira = serializers.BooleanField(default=False)
age = serializers.IntegerField(read_only=True)
sla_days_remaining = serializers.IntegerField(read_only=True)
@@ -1042,11 +1665,22 @@ class FindingSerializer(TaggitSerializer, serializers.ModelSerializer):
jira_creation = serializers.SerializerMethodField(read_only=True)
jira_change = serializers.SerializerMethodField(read_only=True)
display_status = serializers.SerializerMethodField()
- finding_groups = FindingGroupSerializer(source='finding_group_set', many=True, read_only=True)
+ finding_groups = FindingGroupSerializer(
+ source="finding_group_set", many=True, read_only=True
+ )
+ vulnerability_ids = VulnerabilityIdSerializer(
+ source="vulnerability_id_set", many=True, required=False
+ )
+ reporter = serializers.PrimaryKeyRelatedField(
+ required=False, queryset=User.objects.all()
+ )
class Meta:
model = Finding
- fields = '__all__'
+ exclude = (
+ "cve",
+ "inherited_tags",
+ )
@extend_schema_field(serializers.DateTimeField())
@swagger_serializer_method(serializers.DateTimeField())
@@ -1061,13 +1695,15 @@ def get_jira_change(self, obj):
@extend_schema_field(FindingRelatedFieldsSerializer)
@swagger_serializer_method(FindingRelatedFieldsSerializer)
def get_related_fields(self, obj):
- request = self.context.get('request', None)
+ request = self.context.get("request", None)
if request is None:
return None
query_params = request.query_params
- if query_params.get('related_fields', 'false') == 'true':
- return FindingRelatedFieldsSerializer(required=False).to_representation(obj)
+ if query_params.get("related_fields", "false") == "true":
+ return FindingRelatedFieldsSerializer(
+ required=False
+ ).to_representation(obj)
else:
return None
@@ -1080,59 +1716,91 @@ def update(self, instance, validated_data):
to_be_tagged, validated_data = self._pop_tags(validated_data)
# pop push_to_jira so it won't get send to the model as a field
- # TODO: JIRA can we remove this is_push_all_issues, already checked in apiv2 viewset?
- push_to_jira = validated_data.pop('push_to_jira') or jira_helper.is_push_all_issues(instance)
-
- instance = super(TaggitSerializer, self).update(instance, validated_data)
+ # TODO: JIRA can we remove this is_push_all_issues, already checked in
+ # apiv2 viewset?
+ push_to_jira = validated_data.pop(
+ "push_to_jira"
+ ) or jira_helper.is_push_all_issues(instance)
+
+ # Save vulnerability ids and pop them
+ if "vulnerability_id_set" in validated_data:
+ vulnerability_id_set = validated_data.pop("vulnerability_id_set")
+ vulnerability_ids = list()
+ if vulnerability_id_set:
+ for vulnerability_id in vulnerability_id_set:
+ vulnerability_ids.append(
+ vulnerability_id["vulnerability_id"]
+ )
+ save_vulnerability_ids(instance, vulnerability_ids)
+
+ instance = super(TaggitSerializer, self).update(
+ instance, validated_data
+ )
+ # Save the reporter on the finding
+ if reporter_id := validated_data.get("reporter"):
+ instance.reporter = reporter_id
# If we need to push to JIRA, an extra save call is needed.
# Also if we need to update the mitigation date of the finding.
- # TODO try to combine create and save, but for now I'm just fixing a bug and don't want to change to much
+ # TODO try to combine create and save, but for now I'm just fixing a
+        # bug and don't want to change too much
if push_to_jira:
instance.save(push_to_jira=push_to_jira)
- # not sure why we are returning a tag_object, but don't want to change too much now as we're just fixing a bug
+ # not sure why we are returning a tag_object, but don't want to change
+ # too much now as we're just fixing a bug
tag_object = self._save_tags(instance, to_be_tagged)
return tag_object
def validate(self, data):
- if self.context['request'].method == 'PATCH':
- is_active = data.get('active', self.instance.active)
- is_verified = data.get('verified', self.instance.verified)
- is_duplicate = data.get('duplicate', self.instance.duplicate)
- is_false_p = data.get('false_p', self.instance.false_p)
- is_risk_accepted = data.get('risk_accepted', self.instance.risk_accepted)
+ if self.context["request"].method == "PATCH":
+ is_active = data.get("active", self.instance.active)
+ is_verified = data.get("verified", self.instance.verified)
+ is_duplicate = data.get("duplicate", self.instance.duplicate)
+ is_false_p = data.get("false_p", self.instance.false_p)
+ is_risk_accepted = data.get(
+ "risk_accepted", self.instance.risk_accepted
+ )
else:
- is_active = data.get('active', True)
- is_verified = data.get('verified', True)
- is_duplicate = data.get('duplicate', False)
- is_false_p = data.get('false_p', False)
- is_risk_accepted = data.get('risk_accepted', False)
-
- if ((is_active or is_verified) and is_duplicate):
- raise serializers.ValidationError('Duplicate findings cannot be'
- ' verified or active')
+ is_active = data.get("active", True)
+ is_verified = data.get("verified", False)
+ is_duplicate = data.get("duplicate", False)
+ is_false_p = data.get("false_p", False)
+ is_risk_accepted = data.get("risk_accepted", False)
+
+ if (is_active or is_verified) and is_duplicate:
+ raise serializers.ValidationError(
+ "Duplicate findings cannot be" " verified or active"
+ )
if is_false_p and is_verified:
- raise serializers.ValidationError('False positive findings cannot '
- 'be verified.')
+ raise serializers.ValidationError(
+ "False positive findings cannot " "be verified."
+ )
if is_risk_accepted and not self.instance.risk_accepted:
- if not self.instance.test.engagement.product.enable_simple_risk_acceptance:
- raise serializers.ValidationError('Simple risk acceptance is disabled for this product, use the UI to accept this finding.')
+ if (
+ not self.instance.test.engagement.product.enable_simple_risk_acceptance
+ ):
+ raise serializers.ValidationError(
+ "Simple risk acceptance is disabled for this product, use the UI to accept this finding."
+ )
if is_active and is_risk_accepted:
- raise serializers.ValidationError('Active findings cannot '
- 'be risk accepted.')
+ raise serializers.ValidationError(
+ "Active findings cannot be risk accepted."
+ )
return data
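+    # Status rules enforced above, in short:
+    #   duplicate      -> neither active nor verified may be set
+    #   false_p        -> verified may not be set
+    #   risk_accepted  -> active may not be set, and newly accepting risk
+    #                     requires simple risk acceptance on the product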
def build_relational_field(self, field_name, relation_info):
- if field_name == 'notes':
- return NoteSerializer, {'many': True, 'read_only': True}
+ if field_name == "notes":
+ return NoteSerializer, {"many": True, "read_only": True}
return super().build_relational_field(field_name, relation_info)
@extend_schema_field(BurpRawRequestResponseSerializer)
- @swagger_serializer_method(serializer_or_field=BurpRawRequestResponseSerializer)
+ @swagger_serializer_method(
+ serializer_or_field=BurpRawRequestResponseSerializer
+ )
def get_request_response(self, obj):
# burp_req_resp = BurpRawRequestResponse.objects.filter(finding=obj)
burp_req_resp = obj.burprawrequestresponse_set.all()
@@ -1140,36 +1808,41 @@ def get_request_response(self, obj):
for burp in burp_req_resp:
request = burp.get_request()
response = burp.get_response()
- burp_list.append({'request': request, 'response': response})
- serialized_burps = BurpRawRequestResponseSerializer({'req_resp': burp_list})
+ burp_list.append({"request": request, "response": response})
+ serialized_burps = BurpRawRequestResponseSerializer(
+ {"req_resp": burp_list}
+ )
return serialized_burps.data
class FindingCreateSerializer(TaggitSerializer, serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(
- read_only=True,
- allow_null=True,
- required=False,
- many=True)
- test = serializers.PrimaryKeyRelatedField(
- queryset=Test.objects.all())
+ read_only=True, allow_null=True, required=False, many=True
+ )
+ test = serializers.PrimaryKeyRelatedField(queryset=Test.objects.all())
thread_id = serializers.IntegerField(default=0)
found_by = serializers.PrimaryKeyRelatedField(
- queryset=Test_Type.objects.all(),
- many=True)
- url = serializers.CharField(
- allow_null=True,
- default=None)
+ queryset=Test_Type.objects.all(), many=True
+ )
+ url = serializers.CharField(allow_null=True, default=None)
tags = TagListSerializerField(required=False)
push_to_jira = serializers.BooleanField(default=False)
+ vulnerability_ids = VulnerabilityIdSerializer(
+ source="vulnerability_id_set", many=True, required=False
+ )
+ reporter = serializers.PrimaryKeyRelatedField(
+ required=False, queryset=User.objects.all()
+ )
class Meta:
model = Finding
- fields = '__all__'
+ exclude = (
+ "cve",
+ "inherited_tags",
+ )
extra_kwargs = {
- 'active': {'required': True},
- 'verified': {'required': True},
- 'reporter': {'default': serializers.CurrentUserDefault()},
+ "active": {"required": True},
+ "verified": {"required": True},
}
# Overriding this to push add Push to JIRA functionality
@@ -1178,67 +1851,162 @@ def create(self, validated_data):
to_be_tagged, validated_data = self._pop_tags(validated_data)
# pop push_to_jira so it won't get send to the model as a field
- push_to_jira = validated_data.pop('push_to_jira')
+ push_to_jira = validated_data.pop("push_to_jira")
+
+ # Save vulnerability ids and pop them
+ if "vulnerability_id_set" in validated_data:
+ vulnerability_id_set = validated_data.pop("vulnerability_id_set")
+ else:
+ vulnerability_id_set = None
# first save, so we have an instance to get push_all_to_jira from
new_finding = super(TaggitSerializer, self).create(validated_data)
- # TODO: JIRA can we remove this is_push_all_issues, already checked in apiv2 viewset?
- push_to_jira = push_to_jira or jira_helper.is_push_all_issues(new_finding)
+ if vulnerability_id_set:
+ vulnerability_ids = list()
+ for vulnerability_id in vulnerability_id_set:
+ vulnerability_ids.append(vulnerability_id["vulnerability_id"])
+ validated_data["cve"] = vulnerability_ids[0]
+ save_vulnerability_ids(new_finding, vulnerability_ids)
+ new_finding.save()
+
+ # TODO: JIRA can we remove this is_push_all_issues, already checked in
+ # apiv2 viewset?
+ push_to_jira = push_to_jira or jira_helper.is_push_all_issues(
+ new_finding
+ )
# If we need to push to JIRA, an extra save call is needed.
- # TODO try to combine create and save, but for now I'm just fixing a bug and don't want to change to much
+ # TODO try to combine create and save, but for now I'm just fixing a
+        # bug and don't want to change too much
if push_to_jira or new_finding:
new_finding.save(push_to_jira=push_to_jira)
- # not sure why we are returning a tag_object, but don't want to change too much now as we're just fixing a bug
+ # not sure why we are returning a tag_object, but don't want to change
+ # too much now as we're just fixing a bug
tag_object = self._save_tags(new_finding, to_be_tagged)
return tag_object
def validate(self, data):
- if ((data['active'] or data['verified']) and data['duplicate']):
- raise serializers.ValidationError('Duplicate findings cannot be'
- ' verified or active')
- if data['false_p'] and data['verified']:
- raise serializers.ValidationError('False positive findings cannot '
- 'be verified.')
-
- if 'risk_accepted' in data and data['risk_accepted']:
- test = data['test']
+ if "reporter" not in data:
+ request = self.context["request"]
+ data["reporter"] = request.user
+
+ if (data.get("active") or data.get("verified")) and data.get(
+ "duplicate"
+ ):
+ raise serializers.ValidationError(
+ "Duplicate findings cannot be verified or active"
+ )
+ if data.get("false_p") and data.get("verified"):
+ raise serializers.ValidationError(
+ "False positive findings cannot be verified."
+ )
+
+ if "risk_accepted" in data and data.get("risk_accepted"):
+ test = data.get("test")
# test = Test.objects.get(id=test_id)
if not test.engagement.product.enable_simple_risk_acceptance:
- raise serializers.ValidationError('Simple risk acceptance is disabled for this product, use the UI to accept this finding.')
-
- if data['active'] and 'risk_accepted' in data and data['risk_accepted']:
- raise serializers.ValidationError('Active findings cannot '
- 'be risk accepted.')
+ raise serializers.ValidationError(
+ "Simple risk acceptance is disabled for this product, use the UI to accept this finding."
+ )
+
+ if (
+ data.get("active")
+ and "risk_accepted" in data
+ and data.get("risk_accepted")
+ ):
+ raise serializers.ValidationError(
+ "Active findings cannot be risk accepted."
+ )
return data
+class VulnerabilityIdTemplateSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Vulnerability_Id_Template
+ fields = ["vulnerability_id"]
+
+
class FindingTemplateSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField(required=False)
+ vulnerability_ids = VulnerabilityIdTemplateSerializer(
+ source="vulnerability_id_template_set", many=True, required=False
+ )
class Meta:
model = Finding_Template
- fields = '__all__'
+ exclude = ("cve",)
+
+ def create(self, validated_data):
+ # Save vulnerability ids and pop them
+ if "vulnerability_id_template_set" in validated_data:
+ vulnerability_id_set = validated_data.pop(
+ "vulnerability_id_template_set"
+ )
+ else:
+ vulnerability_id_set = None
+
+ new_finding_template = super(TaggitSerializer, self).create(
+ validated_data
+ )
+
+ if vulnerability_id_set:
+ vulnerability_ids = list()
+ for vulnerability_id in vulnerability_id_set:
+ vulnerability_ids.append(vulnerability_id["vulnerability_id"])
+ validated_data["cve"] = vulnerability_ids[0]
+ save_vulnerability_ids_template(
+ new_finding_template, vulnerability_ids
+ )
+ new_finding_template.save()
+
+ return new_finding_template
+
+ def update(self, instance, validated_data):
+ # Save vulnerability ids and pop them
+ if "vulnerability_id_template_set" in validated_data:
+ vulnerability_id_set = validated_data.pop(
+ "vulnerability_id_template_set"
+ )
+ vulnerability_ids = list()
+ if vulnerability_id_set:
+ for vulnerability_id in vulnerability_id_set:
+ vulnerability_ids.append(
+ vulnerability_id["vulnerability_id"]
+ )
+ save_vulnerability_ids_template(instance, vulnerability_ids)
+
+ return super(TaggitSerializer, self).update(instance, validated_data)
+
+
+class CredentialSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Cred_User
+ exclude = ("password",)
+
+
+class CredentialMappingSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Cred_Mapping
+ fields = "__all__"
class StubFindingSerializer(serializers.ModelSerializer):
class Meta:
model = Stub_Finding
- fields = '__all__'
+ fields = "__all__"
class StubFindingCreateSerializer(serializers.ModelSerializer):
- test = serializers.PrimaryKeyRelatedField(
- queryset=Test.objects.all())
+ test = serializers.PrimaryKeyRelatedField(queryset=Test.objects.all())
class Meta:
model = Stub_Finding
- fields = '__all__'
+ fields = "__all__"
extra_kwargs = {
- 'reporter': {'default': serializers.CurrentUserDefault()},
+ "reporter": {"default": serializers.CurrentUserDefault()},
}
@@ -1251,128 +2019,273 @@ class ProductSerializer(TaggitSerializer, serializers.ModelSerializer):
class Meta:
model = Product
- exclude = ['tid', 'updated']
+ exclude = (
+ "tid",
+ "updated",
+ "async_updating"
+ )
+
+ def validate(self, data):
+        async_updating = getattr(self.instance, "async_updating", None)
+        if async_updating:
+            new_sla_config = data.get("sla_configuration", None)
+            old_sla_config = getattr(self.instance, "sla_configuration", None)
+            if (
+                new_sla_config
+                and old_sla_config
+                and new_sla_config != old_sla_config
+            ):
+                raise serializers.ValidationError(
+                    "Finding SLA expiration dates are currently being recalculated. The SLA configuration for this product cannot be changed until the calculation is complete."
+                )
+ return data
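+    # Rationale: while async_updating is set, a background task is still
+    # recalculating finding SLA expiration dates, so switching
+    # sla_configuration mid-run could mix results from two configurations.
+    # All other product edits still pass validation.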
def get_findings_count(self, obj) -> int:
return obj.findings_count
# -> List[int] as return type doesn't seem enough for drf-yasg
- @swagger_serializer_method(serializer_or_field=serializers.ListField(child=serializers.IntegerField()))
+ @swagger_serializer_method(
+ serializer_or_field=serializers.ListField(
+ child=serializers.IntegerField()
+ )
+ )
def get_findings_list(self, obj) -> List[int]:
return obj.open_findings_list
class ImportScanSerializer(serializers.Serializer):
- scan_date = serializers.DateField(required=False)
+ scan_date = serializers.DateField(
+ required=False,
+ help_text="Scan completion date will be used on all findings.",
+ )
minimum_severity = serializers.ChoiceField(
choices=SEVERITY_CHOICES,
- default='Info')
- active = serializers.BooleanField(default=True)
- verified = serializers.BooleanField(default=True)
- scan_type = serializers.ChoiceField(
- choices=get_choices_sorted())
+ default="Info",
+ help_text="Minimum severity level to be imported",
+ )
+ active = serializers.BooleanField(
+ help_text="Override the active setting from the tool."
+ )
+ verified = serializers.BooleanField(
+ help_text="Override the verified setting from the tool."
+ )
+ scan_type = serializers.ChoiceField(choices=get_choices_sorted())
# TODO why do we allow only existing endpoints?
- endpoint_to_add = serializers.PrimaryKeyRelatedField(queryset=Endpoint.objects.all(),
- required=False,
- default=None)
- file = serializers.FileField(required=False)
+ endpoint_to_add = serializers.PrimaryKeyRelatedField(
+ queryset=Endpoint.objects.all(),
+ required=False,
+ default=None,
+ help_text="The IP address, host name or full URL. It must be valid",
+ )
+ file = serializers.FileField(allow_empty_file=True, required=False)
product_type_name = serializers.CharField(required=False)
product_name = serializers.CharField(required=False)
engagement_name = serializers.CharField(required=False)
+ engagement_end_date = serializers.DateField(
+ required=False,
+ help_text="End Date for Engagement. Default is current time + 365 days. Required format year-month-day",
+ )
+ source_code_management_uri = serializers.URLField(
+ max_length=600,
+ required=False,
+ help_text="Resource link to source code",
+ )
engagement = serializers.PrimaryKeyRelatedField(
- queryset=Engagement.objects.all(), required=False)
+ queryset=Engagement.objects.all(), required=False
+ )
test_title = serializers.CharField(required=False)
auto_create_context = serializers.BooleanField(required=False)
-
+ deduplication_on_engagement = serializers.BooleanField(required=False)
lead = serializers.PrimaryKeyRelatedField(
- allow_null=True,
- default=None,
- queryset=User.objects.all())
- tags = TagListSerializerField(required=False)
- close_old_findings = serializers.BooleanField(required=False, default=False,
+ allow_null=True, default=None, queryset=User.objects.all()
+ )
+ tags = TagListSerializerField(
+ required=False, help_text="Add tags that help describe this scan."
+ )
+ close_old_findings = serializers.BooleanField(
+ required=False,
+ default=False,
help_text="Select if old findings no longer present in the report get closed as mitigated when importing. "
- "If service has been set, only the findings for this service will be closed.")
+ "If service has been set, only the findings for this service will be closed.",
+ )
+ close_old_findings_product_scope = serializers.BooleanField(
+ required=False,
+ default=False,
+ help_text="Select if close_old_findings applies to all findings of the same type in the product. "
+ "By default, it is false meaning that only old findings of the same type in the engagement are in scope.",
+ )
push_to_jira = serializers.BooleanField(default=False)
environment = serializers.CharField(required=False)
- version = serializers.CharField(required=False)
- build_id = serializers.CharField(required=False)
- branch_tag = serializers.CharField(required=False)
- commit_hash = serializers.CharField(required=False)
- api_scan_configuration = serializers.PrimaryKeyRelatedField(allow_null=True, default=None,
- queryset=Product_API_Scan_Configuration.objects.all())
- service = serializers.CharField(required=False,
+ version = serializers.CharField(
+ required=False, help_text="Version that was scanned."
+ )
+ build_id = serializers.CharField(
+ required=False, help_text="ID of the build that was scanned."
+ )
+ branch_tag = serializers.CharField(
+ required=False, help_text="Branch or Tag that was scanned."
+ )
+ commit_hash = serializers.CharField(
+ required=False, help_text="Commit that was scanned."
+ )
+ api_scan_configuration = serializers.PrimaryKeyRelatedField(
+ allow_null=True,
+ default=None,
+ queryset=Product_API_Scan_Configuration.objects.all(),
+ )
+ service = serializers.CharField(
+ required=False,
help_text="A service is a self-contained piece of functionality within a Product. "
- "This is an optional field which is used in deduplication and closing of old findings when set. "
- "This affects the whole engagement/product depending on your deduplication scope.")
+ "This is an optional field which is used in deduplication and closing of old findings when set. "
+ "This affects the whole engagement/product depending on your deduplication scope.",
+ )
- group_by = serializers.ChoiceField(required=False, choices=Finding_Group.GROUP_BY_OPTIONS, help_text='Choose an option to automatically group new findings by the chosen option.')
+ group_by = serializers.ChoiceField(
+ required=False,
+ choices=Finding_Group.GROUP_BY_OPTIONS,
+ help_text="Choose an option to automatically group new findings by the chosen option.",
+ )
+ create_finding_groups_for_all_findings = serializers.BooleanField(
+ help_text="If set to false, finding groups will only be created when there is more than one grouped finding",
+ required=False,
+ default=True,
+ )
# extra fields populated in response
- # need to use the _id suffix as without the serializer framework gets confused
- test = serializers.IntegerField(read_only=True) # left for backwards compatibility
+    # need to use the _id suffix, as otherwise the serializer framework
+    # gets confused
+ test = serializers.IntegerField(
+ read_only=True
+ ) # left for backwards compatibility
test_id = serializers.IntegerField(read_only=True)
engagement_id = serializers.IntegerField(read_only=True)
product_id = serializers.IntegerField(read_only=True)
product_type_id = serializers.IntegerField(read_only=True)
statistics = ImportStatisticsSerializer(read_only=True, required=False)
+ apply_tags_to_findings = serializers.BooleanField(
+ help_text="If set to True, the tags will be applied to the findings",
+ required=False,
+ )
def save(self, push_to_jira=False):
data = self.validated_data
- close_old_findings = data['close_old_findings']
- active = data['active']
- verified = data['verified']
- minimum_severity = data['minimum_severity']
- endpoint_to_add = data['endpoint_to_add']
- scan_date = data.get('scan_date', None)
- # Will save in the provided environment or in the `Development` one if absent
- version = data.get('version', None)
- build_id = data.get('build_id', None)
- branch_tag = data.get('branch_tag', None)
- commit_hash = data.get('commit_hash', None)
- api_scan_configuration = data.get('api_scan_configuration', None)
- service = data.get('service', None)
-
- environment_name = data.get('environment', 'Development')
- environment = Development_Environment.objects.get(name=environment_name)
- tags = data.get('tags', None)
- lead = data['lead']
-
- scan = data.get('file', None)
+ close_old_findings = data.get("close_old_findings")
+ close_old_findings_product_scope = data.get(
+ "close_old_findings_product_scope"
+ )
+ minimum_severity = data.get("minimum_severity")
+ endpoint_to_add = data.get("endpoint_to_add")
+ scan_date = data.get("scan_date", None)
+ # Will save in the provided environment or in the `Development` one if
+ # absent
+ version = data.get("version", None)
+ build_id = data.get("build_id", None)
+ branch_tag = data.get("branch_tag", None)
+ commit_hash = data.get("commit_hash", None)
+ api_scan_configuration = data.get("api_scan_configuration", None)
+ service = data.get("service", None)
+ apply_tags_to_findings = data.get("apply_tags_to_findings", False)
+ source_code_management_uri = data.get(
+ "source_code_management_uri", None
+ )
+
+ if "active" in self.initial_data:
+ active = data.get("active")
+ else:
+ active = None
+ if "verified" in self.initial_data:
+ verified = data.get("verified")
+ else:
+ verified = None
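+        # Tri-state handling: self.initial_data distinguishes "field not
+        # sent" from "sent as False"; None is passed on so the importer can
+        # fall back to the tool-reported active/verified status instead of
+        # forcing an override.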
+
+ environment_name = data.get("environment", "Development")
+ environment = Development_Environment.objects.get(
+ name=environment_name
+ )
+ tags = data.get("tags", None)
+ lead = data.get("lead")
+
+ scan = data.get("file", None)
endpoints_to_add = [endpoint_to_add] if endpoint_to_add else None
- group_by = data.get('group_by', None)
+ group_by = data.get("group_by", None)
+ create_finding_groups_for_all_findings = data.get(
+ "create_finding_groups_for_all_findings", True
+ )
- _, test_title, scan_type, engagement_id, engagement_name, product_name, product_type_name, auto_create_context = get_import_meta_data_from_dict(data)
- engagement = get_or_create_engagement(engagement_id, engagement_name, product_name, product_type_name, auto_create_context)
+ engagement_end_date = data.get("engagement_end_date", None)
+ (
+ _,
+ test_title,
+ scan_type,
+ engagement_id,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = get_import_meta_data_from_dict(data)
+ engagement = get_or_create_engagement(
+ engagement_id,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ source_code_management_uri=source_code_management_uri,
+ target_end=engagement_end_date,
+ )
- # have to make the scan_date_time timezone aware otherwise uploads via the API would fail (but unit tests for api upload would pass...)
- scan_date_time = timezone.make_aware(datetime.combine(scan_date, datetime.min.time())) if scan_date else None
+ # have to make the scan_date_time timezone aware otherwise uploads via
+ # the API would fail (but unit tests for api upload would pass...)
+ scan_date_time = (
+ timezone.make_aware(
+ datetime.combine(scan_date, datetime.min.time())
+ )
+ if scan_date
+ else None
+ )
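+        # For example, a scan_date of 2023-06-01 becomes the aware datetime
+        # 2023-06-01 00:00:00 in the configured timezone; a missing
+        # scan_date stays None and is left for the importer to default.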
importer = Importer()
try:
- test, finding_count, closed_finding_count, test_import = importer.import_scan(scan, scan_type, engagement, lead, environment,
- active=active, verified=verified, tags=tags,
- minimum_severity=minimum_severity,
- endpoints_to_add=endpoints_to_add,
- scan_date=scan_date_time, version=version,
- branch_tag=branch_tag, build_id=build_id,
- commit_hash=commit_hash,
- push_to_jira=push_to_jira,
- close_old_findings=close_old_findings,
- group_by=group_by,
- api_scan_configuration=api_scan_configuration,
- service=service,
- title=test_title)
+ (
+ test,
+ finding_count,
+ closed_finding_count,
+ test_import,
+ ) = importer.import_scan(
+ scan,
+ scan_type,
+ engagement,
+ lead,
+ environment,
+ active=active,
+ verified=verified,
+ tags=tags,
+ minimum_severity=minimum_severity,
+ endpoints_to_add=endpoints_to_add,
+ scan_date=scan_date_time,
+ version=version,
+ branch_tag=branch_tag,
+ build_id=build_id,
+ commit_hash=commit_hash,
+ push_to_jira=push_to_jira,
+ close_old_findings=close_old_findings,
+ close_old_findings_product_scope=close_old_findings_product_scope,
+ group_by=group_by,
+ api_scan_configuration=api_scan_configuration,
+ service=service,
+ title=test_title,
+ create_finding_groups_for_all_findings=create_finding_groups_for_all_findings,
+ apply_tags_to_findings=apply_tags_to_findings,
+ )
if test:
- data['test'] = test.id
- data['test_id'] = test.id
- data['engagement_id'] = test.engagement.id
- data['product_id'] = test.engagement.product.id
- data['product_type_id'] = test.engagement.product.prod_type.id
- data['statistics'] = {'after': test.statistics}
+ data["test"] = test.id
+ data["test_id"] = test.id
+ data["engagement_id"] = test.engagement.id
+ data["product_id"] = test.engagement.product.id
+ data["product_type_id"] = test.engagement.product.prod_type.id
+ data["statistics"] = {"after": test.statistics}
# convert to exception otherwise django rest framework will swallow them as 400 error
# exceptions are already logged in the importer
@@ -1385,165 +2298,338 @@ def validate(self, data):
scan_type = data.get("scan_type")
file = data.get("file")
if not file and requires_file(scan_type):
- raise serializers.ValidationError('Uploading a Report File is required for {}'.format(scan_type))
+ raise serializers.ValidationError(
+ "Uploading a Report File is required for {}".format(scan_type)
+ )
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
- 'Report file is too large. Maximum supported size is {} MB'.format(settings.SCAN_FILE_MAX_SIZE))
+ "Report file is too large. Maximum supported size is {} MB".format(
+ settings.SCAN_FILE_MAX_SIZE
+ )
+ )
tool_type = requires_tool_type(scan_type)
if tool_type:
- api_scan_configuration = data.get('api_scan_configuration')
- if api_scan_configuration and tool_type != api_scan_configuration.tool_configuration.tool_type.name:
- raise serializers.ValidationError(f'API scan configuration must be of tool type {tool_type}')
+ api_scan_configuration = data.get("api_scan_configuration")
+ if (
+ api_scan_configuration
+ and tool_type
+ != api_scan_configuration.tool_configuration.tool_type.name
+ ):
+ raise serializers.ValidationError(
+ f"API scan configuration must be of tool type {tool_type}"
+ )
return data
def validate_scan_date(self, value):
if value and value > timezone.localdate():
raise serializers.ValidationError(
- 'The scan_date cannot be in the future!')
+ "The scan_date cannot be in the future!"
+ )
return value
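+    # timezone.localdate() respects the configured Django TIME_ZONE, so
+    # only strictly future dates are rejected; a report dated today is
+    # still accepted.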
class ReImportScanSerializer(TaggitSerializer, serializers.Serializer):
- scan_date = serializers.DateField(required=False)
+ scan_date = serializers.DateField(
+ required=False,
+ help_text="Scan completion date will be used on all findings.",
+ )
minimum_severity = serializers.ChoiceField(
choices=SEVERITY_CHOICES,
- default='Info')
- active = serializers.BooleanField(default=True)
- verified = serializers.BooleanField(default=True)
+ default="Info",
+ help_text="Minimum severity level to be imported",
+ )
+ active = serializers.BooleanField(
+ help_text="Override the active setting from the tool."
+ )
+ verified = serializers.BooleanField(
+ help_text="Override the verified setting from the tool."
+ )
+ help_do_not_reactivate = "Select if the import should ignore active findings from the report, useful for triage-less scanners. Will keep existing findings closed, without reactivating them. For more information check the docs."
+ do_not_reactivate = serializers.BooleanField(
+ default=False, required=False, help_text=help_do_not_reactivate
+ )
scan_type = serializers.ChoiceField(
- choices=get_choices_sorted())
- endpoint_to_add = serializers.PrimaryKeyRelatedField(queryset=Endpoint.objects.all(),
- default=None,
- required=False)
- file = serializers.FileField(required=False)
+ choices=get_choices_sorted(), required=True
+ )
+ endpoint_to_add = serializers.PrimaryKeyRelatedField(
+ queryset=Endpoint.objects.all(), default=None, required=False
+ )
+ file = serializers.FileField(allow_empty_file=True, required=False)
product_type_name = serializers.CharField(required=False)
product_name = serializers.CharField(required=False)
engagement_name = serializers.CharField(required=False)
- test = serializers.PrimaryKeyRelatedField(required=False,
- queryset=Test.objects.all())
+ engagement_end_date = serializers.DateField(
+ required=False,
+ help_text="End Date for Engagement. Default is current time + 365 days. Required format year-month-day",
+ )
+ source_code_management_uri = serializers.URLField(
+ max_length=600,
+ required=False,
+ help_text="Resource link to source code",
+ )
+ test = serializers.PrimaryKeyRelatedField(
+ required=False, queryset=Test.objects.all()
+ )
test_title = serializers.CharField(required=False)
auto_create_context = serializers.BooleanField(required=False)
+ deduplication_on_engagement = serializers.BooleanField(required=False)
push_to_jira = serializers.BooleanField(default=False)
# Close the old findings if the parameter is not provided. This is to
     # maintain the old API behavior after reintroducing the close_old_findings parameter
# also for ReImport.
- close_old_findings = serializers.BooleanField(required=False, default=True)
- version = serializers.CharField(required=False)
- build_id = serializers.CharField(required=False)
- branch_tag = serializers.CharField(required=False)
- commit_hash = serializers.CharField(required=False)
- api_scan_configuration = serializers.PrimaryKeyRelatedField(allow_null=True, default=None,
- queryset=Product_API_Scan_Configuration.objects.all())
- service = serializers.CharField(required=False,
+ close_old_findings = serializers.BooleanField(
+ required=False,
+ default=True,
+ help_text="Select if old findings no longer present in the report get closed as mitigated when importing.",
+ )
+ close_old_findings_product_scope = serializers.BooleanField(
+ required=False,
+ default=False,
+ help_text="Select if close_old_findings applies to all findings of the same type in the product. "
+ "By default, it is false meaning that only old findings of the same type in the engagement are in scope. "
+ "Note that this only applies on the first call to reimport-scan.",
+ )
+ version = serializers.CharField(
+ required=False,
+ help_text="Version that will be set on existing Test object. Leave empty to leave existing value in place.",
+ )
+ build_id = serializers.CharField(
+ required=False, help_text="ID of the build that was scanned."
+ )
+ branch_tag = serializers.CharField(
+ required=False, help_text="Branch or Tag that was scanned."
+ )
+ commit_hash = serializers.CharField(
+ required=False, help_text="Commit that was scanned."
+ )
+ api_scan_configuration = serializers.PrimaryKeyRelatedField(
+ allow_null=True,
+ default=None,
+ queryset=Product_API_Scan_Configuration.objects.all(),
+ )
+ service = serializers.CharField(
+ required=False,
help_text="A service is a self-contained piece of functionality within a Product. "
- "This is an optional field which is used in deduplication and closing of old findings when set. "
- "This affects the whole engagement/product depending on your deduplication scope.")
+ "This is an optional field which is used in deduplication and closing of old findings when set. "
+ "This affects the whole engagement/product depending on your deduplication scope.",
+ )
environment = serializers.CharField(required=False)
lead = serializers.PrimaryKeyRelatedField(
- allow_null=True,
- default=None,
- queryset=User.objects.all())
- tags = TagListSerializerField(required=False)
+ allow_null=True, default=None, queryset=User.objects.all()
+ )
+ tags = TagListSerializerField(
+ required=False,
+ help_text="Modify existing tags that help describe this scan. (Existing test tags will be overwritten)",
+ )
- group_by = serializers.ChoiceField(required=False, choices=Finding_Group.GROUP_BY_OPTIONS, help_text='Choose an option to automatically group new findings by the chosen option.')
+ group_by = serializers.ChoiceField(
+ required=False,
+ choices=Finding_Group.GROUP_BY_OPTIONS,
+ help_text="Choose an option to automatically group new findings by the chosen option.",
+ )
+ create_finding_groups_for_all_findings = serializers.BooleanField(
+ help_text="If set to false, finding groups will only be created when there is more than one grouped finding",
+ required=False,
+ default=True,
+ )
# extra fields populated in response
- # need to use the _id suffix as without the serializer framework gets confused
+ # need to use the _id suffix as without the serializer framework gets
+ # confused
test_id = serializers.IntegerField(read_only=True)
- engagement_id = serializers.IntegerField(read_only=True) # need to use the _id suffix as without the serializer framework gets confused
+ engagement_id = serializers.IntegerField(
+ read_only=True
+ ) # need to use the _id suffix as without the serializer framework gets confused
product_id = serializers.IntegerField(read_only=True)
product_type_id = serializers.IntegerField(read_only=True)
statistics = ImportStatisticsSerializer(read_only=True, required=False)
+ apply_tags_to_findings = serializers.BooleanField(
+ help_text="If set to True, the tags will be applied to the findings",
+ required=False,
+ )
def save(self, push_to_jira=False):
- logger.debug('push_to_jira: %s', push_to_jira)
+ logger.debug("push_to_jira: %s", push_to_jira)
data = self.validated_data
- scan_type = data['scan_type']
- endpoint_to_add = data['endpoint_to_add']
- minimum_severity = data['minimum_severity']
- scan_date = data.get('scan_date', None)
- close_old_findings = data['close_old_findings']
- verified = data['verified']
- active = data['active']
- version = data.get('version', None)
- build_id = data.get('build_id', None)
- branch_tag = data.get('branch_tag', None)
- commit_hash = data.get('commit_hash', None)
- api_scan_configuration = data.get('api_scan_configuration', None)
- service = data.get('service', None)
- lead = data.get('lead', None)
- tags = data.get('tags', None)
- environment_name = data.get('environment', 'Development')
- environment = Development_Environment.objects.get(name=environment_name)
-
- scan = data.get('file', None)
+ scan_type = data.get("scan_type")
+ endpoint_to_add = data.get("endpoint_to_add")
+ minimum_severity = data.get("minimum_severity")
+ scan_date = data.get("scan_date", None)
+ close_old_findings = data.get("close_old_findings")
+ close_old_findings_product_scope = data.get(
+ "close_old_findings_product_scope"
+ )
+ apply_tags_to_findings = data.get("apply_tags_to_findings", False)
+ do_not_reactivate = data.get("do_not_reactivate", False)
+ version = data.get("version", None)
+ build_id = data.get("build_id", None)
+ branch_tag = data.get("branch_tag", None)
+ commit_hash = data.get("commit_hash", None)
+ api_scan_configuration = data.get("api_scan_configuration", None)
+ service = data.get("service", None)
+ lead = data.get("lead", None)
+ tags = data.get("tags", None)
+ environment_name = data.get("environment", "Development")
+ environment = Development_Environment.objects.get(
+ name=environment_name
+ )
+ scan = data.get("file", None)
endpoints_to_add = [endpoint_to_add] if endpoint_to_add else None
+ source_code_management_uri = data.get(
+ "source_code_management_uri", None
+ )
+ engagement_end_date = data.get("engagement_end_date", None)
+
+ if "active" in self.initial_data:
+ active = data.get("active")
+ else:
+ active = None
+ if "verified" in self.initial_data:
+ verified = data.get("verified")
+ else:
+ verified = None
- group_by = data.get('group_by', None)
+ group_by = data.get("group_by", None)
+ create_finding_groups_for_all_findings = data.get(
+ "create_finding_groups_for_all_findings", True
+ )
- test_id, test_title, scan_type, _, engagement_name, product_name, product_type_name, auto_create_context = get_import_meta_data_from_dict(data)
+ (
+ test_id,
+ test_title,
+ scan_type,
+ _,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = get_import_meta_data_from_dict(data)
# we passed validation, so the test is present
product = get_target_product_if_exists(product_name)
- engagement = get_target_engagement_if_exists(None, engagement_name, product)
- test = get_target_test_if_exists(test_id, test_title, scan_type, engagement)
+ engagement = get_target_engagement_if_exists(
+ None, engagement_name, product
+ )
+ test = get_target_test_if_exists(
+ test_id, test_title, scan_type, engagement
+ )
- # have to make the scan_date_time timezone aware otherwise uploads via the API would fail (but unit tests for api upload would pass...)
- scan_date_time = timezone.make_aware(datetime.combine(scan_date, datetime.min.time())) if scan_date else None
+ # have to make the scan_date_time timezone aware otherwise uploads via
+ # the API would fail (but unit tests for api upload would pass...)
+ scan_date_time = (
+ timezone.make_aware(
+ datetime.combine(scan_date, datetime.min.time())
+ )
+ if scan_date
+ else None
+ )
statistics_before, statistics_delta = None, None
+
try:
if test:
# reimport into provided / latest test
statistics_before = test.statistics
reimporter = ReImporter()
- test, finding_count, new_finding_count, closed_finding_count, reactivated_finding_count, untouched_finding_count, test_import = \
- reimporter.reimport_scan(scan, scan_type, test, active=active, verified=verified,
- tags=None, minimum_severity=minimum_severity,
- endpoints_to_add=endpoints_to_add, scan_date=scan_date_time,
- version=version, branch_tag=branch_tag, build_id=build_id,
- commit_hash=commit_hash, push_to_jira=push_to_jira,
- close_old_findings=close_old_findings,
- group_by=group_by, api_scan_configuration=api_scan_configuration,
- service=service)
+ (
+ test,
+ finding_count,
+ new_finding_count,
+ closed_finding_count,
+ reactivated_finding_count,
+ untouched_finding_count,
+ test_import,
+ ) = reimporter.reimport_scan(
+ scan,
+ scan_type,
+ test,
+ active=active,
+ verified=verified,
+ tags=tags,
+ minimum_severity=minimum_severity,
+ endpoints_to_add=endpoints_to_add,
+ scan_date=scan_date_time,
+ version=version,
+ branch_tag=branch_tag,
+ build_id=build_id,
+ commit_hash=commit_hash,
+ push_to_jira=push_to_jira,
+ close_old_findings=close_old_findings,
+ group_by=group_by,
+ api_scan_configuration=api_scan_configuration,
+ service=service,
+ do_not_reactivate=do_not_reactivate,
+ create_finding_groups_for_all_findings=create_finding_groups_for_all_findings,
+ apply_tags_to_findings=apply_tags_to_findings,
+ )
if test_import:
statistics_delta = test_import.statistics
elif auto_create_context:
# perform Import to create test
- logger.debug('reimport for non-existing test, using import to create new test')
- engagement = get_or_create_engagement(None, engagement_name, product_name, product_type_name, auto_create_context)
+ logger.debug(
+ "reimport for non-existing test, using import to create new test"
+ )
+ engagement = get_or_create_engagement(
+ None,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ source_code_management_uri=source_code_management_uri,
+ target_end=engagement_end_date,
+ )
importer = Importer()
- test, finding_count, closed_finding_count, _ = importer.import_scan(scan, scan_type, engagement, lead, environment,
- active=active, verified=verified, tags=tags,
- minimum_severity=minimum_severity,
- endpoints_to_add=endpoints_to_add,
- scan_date=scan_date_time, version=version,
- branch_tag=branch_tag, build_id=build_id,
- commit_hash=commit_hash,
- push_to_jira=push_to_jira,
- close_old_findings=close_old_findings,
- group_by=group_by,
- api_scan_configuration=api_scan_configuration,
- service=service,
- title=test_title)
+ (
+ test,
+ finding_count,
+ closed_finding_count,
+ _,
+ ) = importer.import_scan(
+ scan,
+ scan_type,
+ engagement,
+ lead,
+ environment,
+ active=active,
+ verified=verified,
+ tags=tags,
+ minimum_severity=minimum_severity,
+ endpoints_to_add=endpoints_to_add,
+ scan_date=scan_date_time,
+ version=version,
+ branch_tag=branch_tag,
+ build_id=build_id,
+ commit_hash=commit_hash,
+ push_to_jira=push_to_jira,
+ close_old_findings=close_old_findings,
+ close_old_findings_product_scope=close_old_findings_product_scope,
+ group_by=group_by,
+ api_scan_configuration=api_scan_configuration,
+ service=service,
+ title=test_title,
+ create_finding_groups_for_all_findings=create_finding_groups_for_all_findings,
+ )
else:
# should be captured by validation / permission check already
- raise NotFound('test not found')
+ raise NotFound("test not found")
if test:
- data['test'] = test
- data['test_id'] = test.id
- data['engagement_id'] = test.engagement.id
- data['product_id'] = test.engagement.product.id
- data['product_type_id'] = test.engagement.product.prod_type.id
- data['statistics'] = {}
+ data["test"] = test
+ data["test_id"] = test.id
+ data["engagement_id"] = test.engagement.id
+ data["product_id"] = test.engagement.product.id
+ data["product_type_id"] = test.engagement.product.prod_type.id
+ data["statistics"] = {}
if statistics_before:
- data['statistics']['before'] = statistics_before
+ data["statistics"]["before"] = statistics_before
if statistics_delta:
- data['statistics']['delta'] = statistics_delta
- data['statistics']['after'] = test.statistics
+ data["statistics"]["delta"] = statistics_delta
+ data["statistics"]["after"] = test.statistics
# convert to exception otherwise django rest framework will swallow them as 400 error
# exceptions are already logged in the importer
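
The pattern this comment refers to, distilled (a sketch; the exception types are illustrative, matching the SyntaxError/ValueError handling used elsewhere in this file):

    def run_import_with_visible_errors(run_import):
        # Re-raise importer errors as a plain Exception so DRF reports a server
        # error instead of folding them into a 400 validation response.
        try:
            return run_import()
        except (SyntaxError, ValueError) as e:
            raise Exception(e)
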
@@ -1556,66 +2642,94 @@ def validate(self, data):
scan_type = data.get("scan_type")
file = data.get("file")
if not file and requires_file(scan_type):
- raise serializers.ValidationError('Uploading a Report File is required for {}'.format(scan_type))
+ raise serializers.ValidationError(
+ "Uploading a Report File is required for {}".format(scan_type)
+ )
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
- 'Report file is too large. Maximum supported size is {} MB'.format(settings.SCAN_FILE_MAX_SIZE))
+ "Report file is too large. Maximum supported size is {} MB".format(
+ settings.SCAN_FILE_MAX_SIZE
+ )
+ )
tool_type = requires_tool_type(scan_type)
if tool_type:
- api_scan_configuration = data.get('api_scan_configuration')
- if api_scan_configuration and tool_type != api_scan_configuration.tool_configuration.tool_type.name:
- raise serializers.ValidationError(f'API scan configuration must be of tool type {tool_type}')
+ api_scan_configuration = data.get("api_scan_configuration")
+ if (
+ api_scan_configuration
+ and tool_type
+ != api_scan_configuration.tool_configuration.tool_type.name
+ ):
+ raise serializers.ValidationError(
+ f"API scan configuration must be of tool type {tool_type}"
+ )
return data
def validate_scan_date(self, value):
if value and value > timezone.localdate():
raise serializers.ValidationError(
- 'The scan_date cannot be in the future!')
+ "The scan_date cannot be in the future!"
+ )
return value
class EndpointMetaImporterSerializer(serializers.Serializer):
- file = serializers.FileField(
- required=True)
- create_endpoints = serializers.BooleanField(
- default=True,
- required=False)
- create_tags = serializers.BooleanField(
- default=True,
- required=False)
- create_dojo_meta = serializers.BooleanField(
- default=False,
- required=False)
+ file = serializers.FileField(required=True)
+ create_endpoints = serializers.BooleanField(default=True, required=False)
+ create_tags = serializers.BooleanField(default=True, required=False)
+ create_dojo_meta = serializers.BooleanField(default=False, required=False)
product_name = serializers.CharField(required=False)
product = serializers.PrimaryKeyRelatedField(
- queryset=Product.objects.all(), required=False)
+ queryset=Product.objects.all(), required=False
+ )
# extra fields populated in response
- # need to use the _id suffix as without the serializer framework gets confused
+ # need to use the _id suffix as without the serializer framework gets
+ # confused
product_id = serializers.IntegerField(read_only=True)
def validate(self, data):
file = data.get("file")
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
- 'Report file is too large. Maximum supported size is {} MB'.format(settings.SCAN_FILE_MAX_SIZE))
+ "Report file is too large. Maximum supported size is {} MB".format(
+ settings.SCAN_FILE_MAX_SIZE
+ )
+ )
return data
def save(self):
data = self.validated_data
- file = data.get('file', None)
-
- create_endpoints = data['create_endpoints']
- create_tags = data['create_tags']
- create_dojo_meta = data['create_dojo_meta']
+ file = data.get("file")
- _, _, _, _, _, product_name, _, _ = get_import_meta_data_from_dict(data)
+ create_endpoints = data.get("create_endpoints", True)
+ create_tags = data.get("create_tags", True)
+ create_dojo_meta = data.get("create_dojo_meta", False)
+
+ (
+ _,
+ _,
+ _,
+ _,
+ _,
+ product_name,
+ _,
+ _,
+ _,
+ _,
+ ) = get_import_meta_data_from_dict(data)
product = get_target_product_if_exists(product_name)
if not product:
product_id = get_product_id_from_dict(data)
product = get_target_product_by_id_if_exists(product_id)
try:
- endpoint_meta_import(file, product, create_endpoints, create_tags, create_dojo_meta, origin='API')
+ endpoint_meta_import(
+ file,
+ product,
+ create_endpoints,
+ create_tags,
+ create_dojo_meta,
+ origin="API",
+ )
except SyntaxError as se:
raise Exception(se)
except ValueError as ve:
@@ -1623,87 +2737,135 @@ def save(self):
class LanguageTypeSerializer(serializers.ModelSerializer):
-
class Meta:
model = Language_Type
- fields = '__all__'
+ fields = "__all__"
class LanguageSerializer(serializers.ModelSerializer):
-
class Meta:
model = Languages
- fields = '__all__'
+ fields = "__all__"
class ImportLanguagesSerializer(serializers.Serializer):
- product = serializers.PrimaryKeyRelatedField(queryset=Product.objects.all(), required=True)
+ product = serializers.PrimaryKeyRelatedField(
+ queryset=Product.objects.all(), required=True
+ )
file = serializers.FileField(required=True)
def save(self):
data = self.validated_data
- product = data['product']
- languages = data['file']
+ product = data["product"]
+ languages = data["file"]
try:
data = languages.read()
try:
- deserialized = json.loads(str(data, 'utf-8'))
- except:
+ deserialized = json.loads(str(data, "utf-8"))
+ except Exception:
deserialized = json.loads(data)
- except:
+ except Exception:
raise Exception("Invalid format")
Languages.objects.filter(product=product).delete()
for name in deserialized:
- if name not in ['header', 'SUM']:
+ if name not in ["header", "SUM"]:
element = deserialized[name]
try:
- language_type, created = Language_Type.objects.get_or_create(language=name)
+ (
+ language_type,
+ created,
+ ) = Language_Type.objects.get_or_create(language=name)
except Language_Type.MultipleObjectsReturned:
- language_type = Language_Type.objects.filter(language=name).first()
+ language_type = Language_Type.objects.filter(
+ language=name
+ ).first()
language = Languages()
language.product = product
language.language = language_type
- language.files = element.get('nFiles', 0)
- language.blank = element.get('blank', 0)
- language.comment = element.get('comment', 0)
- language.code = element.get('code', 0)
+ language.files = element.get("nFiles", 0)
+ language.blank = element.get("blank", 0)
+ language.comment = element.get("comment", 0)
+ language.code = element.get("code", 0)
language.save()
def validate(self, data):
- if is_scan_file_too_large(data['file']):
+ if is_scan_file_too_large(data["file"]):
raise serializers.ValidationError(
- 'File is too large. Maximum supported size is {} MB'.format(settings.SCAN_FILE_MAX_SIZE))
+ "File is too large. Maximum supported size is {} MB".format(
+ settings.SCAN_FILE_MAX_SIZE
+ )
+ )
return data
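
For reference, the keys consumed above (per-language nFiles/blank/comment/code, with "header" and "SUM" skipped) match the shape of cloc's --json output; a plausible minimal payload (exact header fields vary by cloc version):

    sample_cloc_json = {
        "header": {"cloc_version": "1.90"},  # skipped by the import loop
        "Python": {"nFiles": 10, "blank": 120, "comment": 80, "code": 950},
        "SUM": {"nFiles": 10, "blank": 120, "comment": 80, "code": 950},  # skipped
    }
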
class AddNewNoteOptionSerializer(serializers.ModelSerializer):
-
class Meta:
model = Notes
- fields = ['entry', 'private', 'note_type']
+ fields = ["entry", "private", "note_type"]
class AddNewFileOptionSerializer(serializers.ModelSerializer):
-
class Meta:
model = FileUpload
- fields = '__all__'
+ fields = "__all__"
class FindingToNotesSerializer(serializers.Serializer):
- finding_id = serializers.PrimaryKeyRelatedField(queryset=Finding.objects.all(), many=False, allow_null=True)
+ finding_id = serializers.PrimaryKeyRelatedField(
+ queryset=Finding.objects.all(), many=False, allow_null=True
+ )
notes = NoteSerializer(many=True)
class FindingToFilesSerializer(serializers.Serializer):
- finding_id = serializers.PrimaryKeyRelatedField(queryset=Finding.objects.all(), many=False, allow_null=True)
+ finding_id = serializers.PrimaryKeyRelatedField(
+ queryset=Finding.objects.all(), many=False, allow_null=True
+ )
files = FileSerializer(many=True)
+ def to_representation(self, data):
+ finding = data.get("finding_id")
+ files = data.get("files")
+ new_files = []
+ for file in files:
+ new_files.append(
+ {
+ "id": file.id,
+ "file": "{site_url}/{file_access_url}".format(
+ site_url=settings.SITE_URL,
+ file_access_url=file.get_accessible_url(
+ finding, finding.id
+ ),
+ ),
+ "title": file.title,
+ }
+ )
+ new_data = {"finding_id": finding.id, "files": new_files}
+ return new_data
+
+
+class FindingCloseSerializer(serializers.ModelSerializer):
+ is_mitigated = serializers.BooleanField(required=False)
+ mitigated = serializers.DateTimeField(required=False)
+ false_p = serializers.BooleanField(required=False)
+ out_of_scope = serializers.BooleanField(required=False)
+ duplicate = serializers.BooleanField(required=False)
+
+ class Meta:
+ model = Finding
+ fields = (
+ "is_mitigated",
+ "mitigated",
+ "false_p",
+ "out_of_scope",
+ "duplicate",
+ )
+
class ReportGenerateOptionSerializer(serializers.Serializer):
include_finding_notes = serializers.BooleanField(default=False)
@@ -1720,7 +2882,9 @@ class ExecutiveSummarySerializer(serializers.Serializer):
test_target_start = serializers.DateTimeField()
test_target_end = serializers.DateTimeField()
test_environment_name = serializers.CharField(max_length=200)
- test_strategy_ref = serializers.URLField(max_length=200, min_length=None, allow_blank=True)
+ test_strategy_ref = serializers.URLField(
+ max_length=200, min_length=None, allow_blank=True
+ )
total_findings = serializers.IntegerField()
@@ -1740,7 +2904,9 @@ class ReportGenerateSerializer(serializers.Serializer):
title = serializers.CharField(max_length=200)
user_id = serializers.IntegerField()
host = serializers.CharField(max_length=200)
- finding_notes = FindingToNotesSerializer(many=True, allow_null=True, required=False)
+ finding_notes = FindingToNotesSerializer(
+ many=True, allow_null=True, required=False
+ )
class TagSerializer(serializers.Serializer):
@@ -1748,25 +2914,26 @@ class TagSerializer(serializers.Serializer):
class SystemSettingsSerializer(TaggitSerializer, serializers.ModelSerializer):
-
class Meta:
model = System_Settings
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
-
if self.instance is not None:
default_group = self.instance.default_group
default_group_role = self.instance.default_group_role
- if 'default_group' in data:
- default_group = data['default_group']
- if 'default_group_role' in data:
- default_group_role = data['default_group_role']
+ if "default_group" in data:
+ default_group = data["default_group"]
+ if "default_group_role" in data:
+ default_group_role = data["default_group_role"]
- if (default_group is None and default_group_role is not None) or \
- (default_group is not None and default_group_role is None):
- raise ValidationError('default_group and default_group_role must either both be set or both be empty.')
+ if (default_group is None and default_group_role is not None) or (
+ default_group is not None and default_group_role is None
+ ):
+ raise ValidationError(
+ "default_group and default_group_role must either both be set or both be empty."
+ )
return data
@@ -1776,34 +2943,74 @@ class FindingNoteSerializer(serializers.Serializer):
class NotificationsSerializer(serializers.ModelSerializer):
- product = serializers.PrimaryKeyRelatedField(queryset=Product.objects.all(),
- required=False,
- default=None,
- allow_null=True)
- user = serializers.PrimaryKeyRelatedField(queryset=Dojo_User.objects.all(),
- required=False,
- default=None,
- allow_null=True)
- product_type_added = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- product_added = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- engagement_added = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- test_added = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- scan_added = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- jira_update = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- upcoming_engagement = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- stale_engagement = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- auto_close_engagement = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- close_engagement = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- user_mentioned = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- code_review = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- review_requested = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- other = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- sla_breach = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
- risk_acceptance_expiration = MultipleChoiceField(choices=NOTIFICATION_CHOICES)
+ product = serializers.PrimaryKeyRelatedField(
+ queryset=Product.objects.all(),
+ required=False,
+ default=None,
+ allow_null=True,
+ )
+ user = serializers.PrimaryKeyRelatedField(
+ queryset=Dojo_User.objects.all(),
+ required=False,
+ default=None,
+ allow_null=True,
+ )
+ product_type_added = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ product_added = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ engagement_added = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ test_added = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ scan_added = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ jira_update = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ upcoming_engagement = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ stale_engagement = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ auto_close_engagement = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ close_engagement = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ user_mentioned = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ code_review = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ review_requested = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ other = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ sla_breach = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ sla_breach_combined = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ risk_acceptance_expiration = MultipleChoiceField(
+ choices=NOTIFICATION_CHOICES, default=DEFAULT_NOTIFICATION
+ )
+ template = serializers.BooleanField(default=False)
class Meta:
model = Notifications
- fields = '__all__'
+ fields = "__all__"
def validate(self, data):
user = None
@@ -1813,15 +3020,28 @@ def validate(self, data):
user = self.instance.user
product = self.instance.product
- if 'user' in data:
- user = data.get('user')
- if 'product' in data:
- product = data.get('product')
-
- if self.instance is None or user != self.instance.user or product != self.instance.product:
- notifications = Notifications.objects.filter(user=user, product=product).count()
+ if "user" in data:
+ user = data.get("user")
+ if "product" in data:
+ product = data.get("product")
+
+ if (
+ self.instance is None
+ or user != self.instance.user
+ or product != self.instance.product
+ ):
+ notifications = Notifications.objects.filter(
+ user=user, product=product, template=False
+ ).count()
if notifications > 0:
- raise ValidationError("Notification for user and product already exists")
+ raise ValidationError(
+ "Notification for user and product already exists"
+ )
+ if (
+ data.get("template")
+ and Notifications.objects.filter(template=True).count() > 0
+ ):
+ raise ValidationError("Notification template already exists")
return data
@@ -1829,13 +3049,33 @@ def validate(self, data):
class EngagementPresetsSerializer(serializers.ModelSerializer):
class Meta:
model = Engagement_Presets
- fields = '__all__'
+ fields = "__all__"
class NetworkLocationsSerializer(serializers.ModelSerializer):
class Meta:
model = Network_Locations
- fields = '__all__'
+ fields = "__all__"
+
+
+class SLAConfigurationSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = SLA_Configuration
+ exclude = (
+ "async_updating",
+ )
+
+ def validate(self, data):
+ async_updating = getattr(self.instance, "async_updating", None)
+ if async_updating:
+ for field in ["critical", "high", "medium", "low"]:
+ old_days = getattr(self.instance, field, None)
+ new_days = data.get(field, None)
+ if old_days and new_days and (old_days != new_days):
+ raise serializers.ValidationError(
+ "Finding SLA expiration dates are currently being calculated. The SLA days for this SLA configuration cannot be changed until the calculation is complete."
+ )
+ return data
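
The guard above, restated as plain Python for clarity (a sketch, not DefectDojo API; async_updating is set elsewhere while SLA expiration dates are recalculated in the background):

    def sla_change_blocked(instance, data, fields=("critical", "high", "medium", "low")):
        # Block changes to any of the four SLA day fields while a recalculation runs.
        if not getattr(instance, "async_updating", None):
            return False
        return any(
            getattr(instance, field, None) and data.get(field) and getattr(instance, field) != data.get(field)
            for field in fields
        )
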
class UserProfileSerializer(serializers.Serializer):
@@ -1845,3 +3085,142 @@ class UserProfileSerializer(serializers.Serializer):
dojo_group_member = DojoGroupMemberSerializer(many=True)
product_type_member = ProductTypeMemberSerializer(many=True)
product_member = ProductMemberSerializer(many=True)
+
+
+class DeletePreviewSerializer(serializers.Serializer):
+ model = serializers.CharField(read_only=True)
+ id = serializers.IntegerField(read_only=True, allow_null=True)
+ name = serializers.CharField(read_only=True)
+
+
+class ConfigurationPermissionSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Permission
+ exclude = ("content_type",)
+
+
+class QuestionnaireQuestionSerializer(serializers.ModelSerializer):
+ def to_representation(self, instance):
+ if isinstance(instance, TextQuestion):
+ return TextQuestionSerializer(instance=instance).data
+ elif isinstance(instance, ChoiceQuestion):
+ return ChoiceQuestionSerializer(instance=instance).data
+ else:
+ return QuestionSerializer(instance=instance).data
+
+ class Meta:
+ model = Question
+ exclude = ("polymorphic_ctype",)
+
+
+class QuestionSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Question
+ exclude = ("polymorphic_ctype",)
+
+
+class TextQuestionSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = TextQuestion
+ exclude = ("polymorphic_ctype",)
+
+
+class ChoiceQuestionSerializer(serializers.ModelSerializer):
+ choices = serializers.StringRelatedField(many=True)
+
+ class Meta:
+ model = ChoiceQuestion
+ exclude = ("polymorphic_ctype",)
+
+
+class QuestionnaireAnsweredSurveySerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Answered_Survey
+ fields = "__all__"
+
+
+class QuestionnaireAnswerSerializer(serializers.ModelSerializer):
+ def to_representation(self, instance):
+ if isinstance(instance, TextAnswer):
+ return TextAnswerSerializer(instance=instance).data
+ elif isinstance(instance, ChoiceAnswer):
+ return ChoiceAnswerSerializer(instance=instance).data
+ else:
+ return AnswerSerializer(instance=instance).data
+
+ class Meta:
+ model = Answer
+ exclude = ("polymorphic_ctype",)
+
+
+class AnswerSerializer(serializers.ModelSerializer):
+ question = serializers.StringRelatedField()
+ answered_survey = QuestionnaireAnsweredSurveySerializer()
+
+ class Meta:
+ model = Answer
+ exclude = ("polymorphic_ctype",)
+
+
+class TextAnswerSerializer(serializers.ModelSerializer):
+ question = serializers.StringRelatedField()
+ answered_survey = QuestionnaireAnsweredSurveySerializer()
+
+ class Meta:
+ model = TextAnswer
+ exclude = ("polymorphic_ctype",)
+
+
+class ChoiceAnswerSerializer(serializers.ModelSerializer):
+ answer = serializers.StringRelatedField(many=True)
+ question = serializers.StringRelatedField()
+ answered_survey = QuestionnaireAnsweredSurveySerializer()
+
+ class Meta:
+ model = ChoiceAnswer
+ exclude = ("polymorphic_ctype",)
+
+
+class QuestionnaireEngagementSurveySerializer(serializers.ModelSerializer):
+ questions = serializers.SerializerMethodField()
+
+ @extend_schema_field(serializers.ListField(child=serializers.CharField()))
+ @swagger_serializer_method(
+ serializers.ListField(child=serializers.CharField())
+ )
+ def get_questions(self, obj):
+ questions = obj.questions.all()
+ formatted_questions = []
+ for question in questions:
+ formatted_question = f"Order #{question.order} - {question.text}{' (Optional)' if question.optional else ''}"
+ formatted_questions.append(formatted_question)
+ return formatted_questions
+
+ class Meta:
+ model = Engagement_Survey
+ fields = "__all__"
+
+
+class QuestionnaireGeneralSurveySerializer(serializers.ModelSerializer):
+ survey = QuestionnaireEngagementSurveySerializer()
+
+ class Meta:
+ model = General_Survey
+ fields = "__all__"
+
+
+class AnnouncementSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Announcement
+ fields = "__all__"
+
+ def create(self, validated_data):
+ validated_data["id"] = 1
+ try:
+ return super().create(validated_data)
+ except IntegrityError as e:
+ if 'duplicate key value violates unique constraint "dojo_announcement_pkey"' in str(e):
+ raise serializers.ValidationError("No more than one Announcement is allowed")
+ else:
+ raise
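
A hedged usage sketch of the single-row behavior enforced above (field values illustrative): because create() pins the primary key to 1, a second create collides on the unique constraint and surfaces as a ValidationError.

    first = AnnouncementSerializer(data={"message": "Maintenance window at noon"})
    if first.is_valid():
        first.save()  # occupies pk=1
    second = AnnouncementSerializer(data={"message": "Another banner"})
    if second.is_valid():
        second.save()  # IntegrityError on dojo_announcement_pkey ->
                       # ValidationError("No more than one Announcement is allowed")
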
diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py
index 1df3f8ddc54..fceb87c7ea2 100644
--- a/dojo/api_v2/views.py
+++ b/dojo/api_v2/views.py
@@ -1,9 +1,10 @@
from rest_framework.generics import GenericAPIView
from drf_spectacular.types import OpenApiTypes
from crum import get_current_user
-from django.http import HttpResponse, Http404
+from django.http import HttpResponse, Http404, FileResponse
from django.shortcuts import get_object_or_404
from django.utils import timezone
+from django.contrib.auth.models import Permission
from django.core.exceptions import ValidationError
from django.utils.decorators import method_decorator
from drf_yasg.inspectors.base import NotHandled
@@ -11,98 +12,244 @@
from rest_framework import viewsets, mixins, status
from rest_framework.response import Response
from django.db import IntegrityError
-from rest_framework.permissions import DjangoModelPermissions, IsAuthenticated, IsAdminUser
+from rest_framework.permissions import DjangoModelPermissions, IsAuthenticated
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema, no_body
import base64
+import mimetypes
from dojo.engagement.services import close_engagement, reopen_engagement
-from dojo.importers.reimporter.utils import get_target_engagement_if_exists, get_target_product_if_exists, get_target_test_if_exists
-from dojo.models import Language_Type, Languages, Notifications, Product, Product_Type, Engagement, Test, Test_Import, Test_Type, Finding, \
- User, Stub_Finding, Finding_Template, Notes, \
- JIRA_Issue, Tool_Product_Settings, Tool_Configuration, Tool_Type, \
- Endpoint, JIRA_Project, JIRA_Instance, DojoMeta, Development_Environment, \
- Dojo_User, Note_Type, System_Settings, App_Analysis, Endpoint_Status, \
- Sonarqube_Issue, Sonarqube_Issue_Transition, Regulation, \
- BurpRawRequestResponse, FileUpload, Product_Type_Member, Product_Member, Dojo_Group, \
- Product_Group, Product_Type_Group, Role, Global_Role, Dojo_Group_Member, Engagement_Presets, Network_Locations, \
- UserContactInfo, Product_API_Scan_Configuration
-
+from dojo.importers.reimporter.utils import (
+ get_target_engagement_if_exists,
+ get_target_product_if_exists,
+ get_target_test_if_exists,
+)
+from dojo.models import (
+ Language_Type,
+ Languages,
+ Notifications,
+ Product,
+ Product_Type,
+ Engagement,
+ SLA_Configuration,
+ Test,
+ Test_Import,
+ Test_Type,
+ Finding,
+ User,
+ Stub_Finding,
+ Finding_Template,
+ Notes,
+ JIRA_Issue,
+ Tool_Product_Settings,
+ Tool_Configuration,
+ Tool_Type,
+ Endpoint,
+ JIRA_Project,
+ JIRA_Instance,
+ DojoMeta,
+ Development_Environment,
+ Dojo_User,
+ Note_Type,
+ System_Settings,
+ App_Analysis,
+ Endpoint_Status,
+ Sonarqube_Issue,
+ Sonarqube_Issue_Transition,
+ Regulation,
+ Risk_Acceptance,
+ BurpRawRequestResponse,
+ FileUpload,
+ Product_Type_Member,
+ Product_Member,
+ Dojo_Group,
+ Product_Group,
+ Product_Type_Group,
+ Role,
+ Global_Role,
+ Dojo_Group_Member,
+ Engagement_Presets,
+ Network_Locations,
+ UserContactInfo,
+ Product_API_Scan_Configuration,
+ Cred_Mapping,
+ Cred_User,
+ Question,
+ Answer,
+ Engagement_Survey,
+ Answered_Survey,
+ General_Survey,
+ Check_List,
+ Announcement,
+)
from dojo.endpoint.views import get_endpoint_ids
-from dojo.reports.views import report_url_resolver, prefetch_related_findings_for_report
-from dojo.finding.views import set_finding_as_original_internal, reset_finding_duplicate_status_internal, \
- duplicate_cluster
-from dojo.filters import ReportFindingFilter, \
- ApiFindingFilter, ApiProductFilter, ApiEngagementFilter, ApiEndpointFilter, \
- ApiAppAnalysisFilter, ApiTestFilter, ApiTemplateFindingFilter
+from dojo.reports.views import (
+ report_url_resolver,
+ prefetch_related_findings_for_report,
+)
+from dojo.finding.views import (
+ set_finding_as_original_internal,
+ reset_finding_duplicate_status_internal,
+ duplicate_cluster,
+)
+from dojo.filters import (
+ ReportFindingFilter,
+ ApiCredentialsFilter,
+ ApiFindingFilter,
+ ApiProductFilter,
+ ApiEngagementFilter,
+ ApiEndpointFilter,
+ ApiAppAnalysisFilter,
+ ApiTestFilter,
+ ApiTemplateFindingFilter,
+ ApiRiskAcceptanceFilter,
+)
from dojo.risk_acceptance import api as ra_api
from dateutil.relativedelta import relativedelta
from django.conf import settings
from datetime import datetime
-from dojo.utils import get_period_counts_legacy, get_system_setting
-from dojo.api_v2 import serializers, permissions, prefetch, schema
+from dojo.utils import (
+ get_system_setting,
+ get_setting,
+ async_delete,
+)
+from dojo.api_v2 import (
+ serializers,
+ permissions,
+ prefetch,
+ schema,
+ mixins as dojo_mixins,
+)
import dojo.jira_link.helper as jira_helper
import logging
import tagulous
-from dojo.product_type.queries import get_authorized_product_types, get_authorized_product_type_members, \
- get_authorized_product_type_groups
-from dojo.product.queries import get_authorized_products, get_authorized_app_analysis, get_authorized_dojo_meta, \
- get_authorized_product_members, get_authorized_product_groups, get_authorized_languages, \
- get_authorized_engagement_presets, get_authorized_product_api_scan_configurations
+from dojo.product_type.queries import (
+ get_authorized_product_types,
+ get_authorized_product_type_members,
+ get_authorized_product_type_groups,
+)
+from dojo.product.queries import (
+ get_authorized_products,
+ get_authorized_app_analysis,
+ get_authorized_dojo_meta,
+ get_authorized_product_members,
+ get_authorized_product_groups,
+ get_authorized_languages,
+ get_authorized_engagement_presets,
+ get_authorized_product_api_scan_configurations,
+)
from dojo.engagement.queries import get_authorized_engagements
+from dojo.risk_acceptance.queries import get_authorized_risk_acceptances
from dojo.test.queries import get_authorized_tests, get_authorized_test_imports
-from dojo.finding.queries import get_authorized_findings, get_authorized_stub_findings
-from dojo.endpoint.queries import get_authorized_endpoints, get_authorized_endpoint_status
-from dojo.group.queries import get_authorized_groups, get_authorized_group_members
-from dojo.jira_link.queries import get_authorized_jira_projects, get_authorized_jira_issues
+from dojo.finding.queries import (
+ get_authorized_findings,
+ get_authorized_stub_findings,
+)
+from dojo.endpoint.queries import (
+ get_authorized_endpoints,
+ get_authorized_endpoint_status,
+)
+from dojo.group.queries import (
+ get_authorized_groups,
+ get_authorized_group_members,
+)
+from dojo.jira_link.queries import (
+ get_authorized_jira_projects,
+ get_authorized_jira_issues,
+)
from dojo.tool_product.queries import get_authorized_tool_product_settings
-from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema, extend_schema_view
+from dojo.cred.queries import get_authorized_cred_mappings
+from drf_spectacular.utils import (
+ OpenApiParameter,
+ OpenApiResponse,
+ extend_schema,
+ extend_schema_view,
+)
+from drf_spectacular.views import SpectacularAPIView
+from drf_spectacular.renderers import OpenApiJsonRenderer2
from dojo.authorization.roles_permissions import Permissions
+from dojo.user.utils import get_configuration_permissions_codenames
logger = logging.getLogger(__name__)
+class DojoOpenApiJsonRenderer(OpenApiJsonRenderer2):
+ def get_indent(self, accepted_media_type, renderer_context):
+ if accepted_media_type and "indent" in accepted_media_type:
+ return super().get_indent(accepted_media_type, renderer_context)
+ return renderer_context.get("indent", None)
+
+
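
The effect of this renderer, sketched: the generated schema is served compact unless the client opts into pretty-printing through the Accept media type's indent parameter (host below is a placeholder).

    # curl -H "Accept: application/json" https://defectdojo.example.com/api/v2/oa3/schema/
    #   -> compact, single-line JSON
    # curl -H "Accept: application/json; indent=4" https://defectdojo.example.com/api/v2/oa3/schema/
    #   -> pretty-printed with 4-space indentation
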
+class DojoSpectacularAPIView(SpectacularAPIView):
+ renderer_classes = [DojoOpenApiJsonRenderer] + SpectacularAPIView.renderer_classes
+
+
+class DojoModelViewSet(
+ viewsets.ModelViewSet,
+ dojo_mixins.DeletePreviewModelMixin,
+):
+ pass
+
+
+class PrefetchDojoModelViewSet(
+ prefetch.PrefetchListMixin,
+ prefetch.PrefetchRetrieveMixin,
+ DojoModelViewSet,
+):
+ pass
+
+
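
These two helper bases bundle the mixin stacks repeated throughout this file, so the viewsets below collapse to a single base class; schematically (names illustrative):

    # before: every viewset listed the CRUD mixins plus GenericViewSet explicitly
    # after:  class ExampleViewSet(PrefetchDojoModelViewSet):
    #             serializer_class = ExampleSerializer
    #             queryset = Example.objects.none()
    #             filterset_fields = ["id", "name"]
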
# Authorization: authenticated users
-class RoleViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- viewsets.GenericViewSet):
+class RoleViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.RoleSerializer
queryset = Role.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name')
- permission_classes = (IsAuthenticated, )
+ filterset_fields = ["id", "name"]
+ permission_classes = (IsAuthenticated,)
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class DojoGroupViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class DojoGroupViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.DojoGroupSerializer
queryset = Dojo_Group.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name')
- swagger_schema = prefetch.get_prefetch_schema(["dojo_groups_list", "dojo_groups_read"],
- serializers.DojoGroupSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasDojoGroupPermission)
+ filterset_fields = ["id", "name", "social_provider"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["dojo_groups_list", "dojo_groups_read"],
+ serializers.DojoGroupSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasDojoGroupPermission,
+ )
def get_queryset(self):
return get_authorized_groups(Permissions.Group_View).distinct()
@@ -110,72 +257,90 @@ def get_queryset(self):
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class DojoGroupMemberViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class DojoGroupMemberViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.DojoGroupMemberSerializer
queryset = Dojo_Group_Member.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'group_id', 'user_id')
- swagger_schema = prefetch.get_prefetch_schema(["dojo_group_members_list", "dojo_group_members_read"],
- serializers.DojoGroupMemberSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasDojoGroupMemberPermission)
+ filterset_fields = ["id", "group_id", "user_id"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["dojo_group_members_list", "dojo_group_members_read"],
+ serializers.DojoGroupMemberSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasDojoGroupMemberPermission,
+ )
def get_queryset(self):
return get_authorized_group_members(Permissions.Group_View).distinct()
+ @extend_schema(
+ exclude=True
+ )
+ @swagger_auto_schema(
+ auto_schema=None
+ )
def partial_update(self, request, pk=None):
# Object authorization won't work if not all data is provided
- response = {'message': 'Patch function is not offered in this path.'}
+ response = {"message": "Patch function is not offered in this path."}
return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED)
# Authorization: superuser
-class GlobalRoleViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class GlobalRoleViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.GlobalRoleSerializer
queryset = Global_Role.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'user', 'group', 'role')
- swagger_schema = prefetch.get_prefetch_schema(["global_roles_list", "global_roles_read"],
- serializers.GlobalRoleSerializer).to_schema()
+ filterset_fields = ["id", "user", "group", "role"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["global_roles_list", "global_roles_read"],
+ serializers.GlobalRoleSerializer,
+ ).to_schema()
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
# Authorization: object-based
-class EndPointViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class EndPointViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.EndpointSerializer
queryset = Endpoint.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_class = ApiEndpointFilter
- permission_classes = (IsAuthenticated, permissions.UserHasEndpointPermission)
+ filterset_class = ApiEndpointFilter
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["endpoints_list", "endpoints_read"], serializers.EndpointSerializer
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasEndpointPermission,
+ )
def get_queryset(self):
return get_authorized_endpoints(Permissions.Endpoint_View).distinct()
@@ -188,21 +353,34 @@ def get_queryset(self):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=True, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=True, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request, pk=None):
endpoint = self.get_object()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, endpoint, options)
report = serializers.ReportGenerateSerializer(data)
@@ -210,50 +388,85 @@ def generate_report(self, request, pk=None):
# Authorization: object-based
-class EndpointStatusViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class EndpointStatusViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.EndpointStatusSerializer
queryset = Endpoint_Status.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('mitigated', 'false_positive', 'out_of_scope',
- 'risk_accepted', 'mitigated_by', 'finding', 'endpoint')
- permission_classes = (IsAuthenticated, permissions.UserHasEndpointStatusPermission)
+ filterset_fields = [
+ "mitigated",
+ "false_positive",
+ "out_of_scope",
+ "risk_accepted",
+ "mitigated_by",
+ "finding",
+ "endpoint",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["endpoint_status_list", "endpoint_status_read"],
+ serializers.EndpointStatusSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasEndpointStatusPermission,
+ )
def get_queryset(self):
- return get_authorized_endpoint_status(Permissions.Endpoint_View).distinct()
+ return get_authorized_endpoint_status(
+ Permissions.Endpoint_View
+ ).distinct()
# Authorization: object-based
-class EngagementViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- ra_api.AcceptedRisksMixin,
- viewsets.GenericViewSet):
+class EngagementViewSet(
+ PrefetchDojoModelViewSet,
+ ra_api.AcceptedRisksMixin,
+):
serializer_class = serializers.EngagementSerializer
queryset = Engagement.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_class = ApiEngagementFilter
- permission_classes = (IsAuthenticated, permissions.UserHasEngagementPermission)
+ filterset_class = ApiEngagementFilter
+ swagger_schema = (
+ prefetch.get_prefetch_schema(
+ ["engagements_list", "engagements_read"],
+ serializers.EngagementSerializer,
+ )
+ .composeWith(
+ prefetch.get_prefetch_schema(
+ ["engagements_complete_checklist_read"],
+ serializers.EngagementCheckListSerializer,
+ )
+ )
+ .to_schema()
+ )
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasEngagementPermission,
+ )
@property
def risk_application_model_class(self):
return Engagement
+ def destroy(self, request, *args, **kwargs):
+ instance = self.get_object()
+ if get_setting("ASYNC_OBJECT_DELETE"):
+ async_del = async_delete()
+ async_del.delete(instance)
+ else:
+ instance.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
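
A note on the toggle consulted above: get_setting("ASYNC_OBJECT_DELETE") decides whether engagement deletion is queued through the Celery-backed async_delete helper or runs inline; presumably it is exposed like other settings via a DD_-prefixed environment variable.

    # ASYNC_OBJECT_DELETE truthy -> async_delete().delete(instance) queues the work
    # ASYNC_OBJECT_DELETE falsy  -> instance.delete() runs inside the request
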
def get_queryset(self):
- return get_authorized_engagements(Permissions.Engagement_View).prefetch_related(
- 'notes',
- 'risk_acceptance',
- 'files').distinct()
+ return (
+ get_authorized_engagements(Permissions.Engagement_View)
+ .prefetch_related("notes", "risk_acceptance", "files")
+ .distinct()
+ )
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_200_OK: ""}
+ request=OpenApiTypes.NONE, responses={status.HTTP_200_OK: ""}
)
@swagger_auto_schema(
request_body=no_body, responses={status.HTTP_200_OK: ""}
@@ -265,8 +478,7 @@ def close(self, request, pk=None):
return HttpResponse()
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_200_OK: ""}
+ request=OpenApiTypes.NONE, responses={status.HTTP_200_OK: ""}
)
@swagger_auto_schema(
request_body=no_body, responses={status.HTTP_200_OK: ""}
@@ -285,355 +497,718 @@ def reopen(self, request, pk=None):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=True, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=True, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request, pk=None):
engagement = self.get_object()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, engagement, options)
report = serializers.ReportGenerateSerializer(data)
return Response(report.data)
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.EngagementToNotesSerializer}
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.EngagementToNotesSerializer
+ },
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.EngagementToNotesSerializer}
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.EngagementToNotesSerializer
+ },
)
@swagger_auto_schema(
- methods=['post'],
+ methods=["post"],
request_body=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@action(detail=True, methods=["get", "post"])
def notes(self, request, pk=None):
engagement = self.get_object()
- if request.method == 'POST':
- new_note = serializers.AddNewNoteOptionSerializer(data=request.data)
+ if request.method == "POST":
+ new_note = serializers.AddNewNoteOptionSerializer(
+ data=request.data
+ )
if new_note.is_valid():
- entry = new_note.validated_data['entry']
- private = new_note.validated_data.get('private', False)
- note_type = new_note.validated_data.get('note_type', None)
+ entry = new_note.validated_data["entry"]
+ private = new_note.validated_data.get("private", False)
+ note_type = new_note.validated_data.get("note_type", None)
else:
- return Response(new_note.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_note.errors, status=status.HTTP_400_BAD_REQUEST
+ )
author = request.user
- note = Notes(entry=entry, author=author, private=private, note_type=note_type)
+ note = Notes(
+ entry=entry,
+ author=author,
+ private=private,
+ note_type=note_type,
+ )
note.save()
engagement.notes.add(note)
- serialized_note = serializers.NoteSerializer({
- "author": author, "entry": entry,
- "private": private
- })
- result = serializers.EngagementToNotesSerializer({
- "engagement_id": engagement, "notes": [serialized_note.data]
- })
- return Response(serialized_note.data,
- status=status.HTTP_201_CREATED)
+ serialized_note = serializers.NoteSerializer(
+ {"author": author, "entry": entry, "private": private}
+ )
+ return Response(
+ serialized_note.data, status=status.HTTP_201_CREATED
+ )
notes = engagement.notes.all()
- serialized_notes = serializers.EngagementToNotesSerializer({
- "engagement_id": engagement, "notes": notes
- })
- return Response(serialized_notes.data,
- status=status.HTTP_200_OK)
+ serialized_notes = serializers.EngagementToNotesSerializer(
+ {"engagement_id": engagement, "notes": notes}
+ )
+ return Response(serialized_notes.data, status=status.HTTP_200_OK)
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.EngagementToFilesSerializer}
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.EngagementToFilesSerializer
+ },
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.EngagementToFilesSerializer}
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.EngagementToFilesSerializer
+ },
)
@swagger_auto_schema(
- method='post',
+ method="post",
request_body=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
+ )
+ @action(
+ detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,)
)
- @action(detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,))
def files(self, request, pk=None):
engagement = self.get_object()
- if request.method == 'POST':
+ if request.method == "POST":
new_file = serializers.FileSerializer(data=request.data)
if new_file.is_valid():
- title = new_file.validated_data['title']
- file = new_file.validated_data['file']
+ title = new_file.validated_data["title"]
+ file = new_file.validated_data["file"]
else:
- return Response(new_file.errors, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_file.errors, status=status.HTTP_400_BAD_REQUEST
+ )
file = FileUpload(title=title, file=file)
file.save()
engagement.files.add(file)
serialized_file = serializers.FileSerializer(file)
- return Response(serialized_file.data, status=status.HTTP_201_CREATED)
+ return Response(
+ serialized_file.data, status=status.HTTP_201_CREATED
+ )
files = engagement.files.all()
- serialized_files = serializers.EngagementToFilesSerializer({
- "engagement_id": engagement, "files": files
- })
+ serialized_files = serializers.EngagementToFilesSerializer(
+ {"engagement_id": engagement, "files": files}
+ )
return Response(serialized_files.data, status=status.HTTP_200_OK)
+ @extend_schema(
+ methods=["POST"],
+ request=serializers.EngagementCheckListSerializer,
+ responses={
+ status.HTTP_201_CREATED: serializers.EngagementCheckListSerializer
+ },
+ )
+ @swagger_auto_schema(
+ method="post",
+ request_body=serializers.EngagementCheckListSerializer,
+ responses={
+ status.HTTP_201_CREATED: serializers.EngagementCheckListSerializer
+ },
+ )
+ @action(detail=True, methods=["get", "post"])
+ def complete_checklist(self, request, pk=None):
+ from dojo.api_v2.prefetch.prefetcher import _Prefetcher
+
+ engagement = self.get_object()
+ check_lists = Check_List.objects.filter(engagement=engagement)
+ if request.method == "POST":
+ if check_lists.count() > 0:
+ return Response(
+ {
+ "message": "A completed checklist for this engagement already exists."
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ check_list = serializers.EngagementCheckListSerializer(
+ data=request.data
+ )
+ if not check_list.is_valid():
+ return Response(
+ check_list.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+ check_list = Check_List(**check_list.data)
+ check_list.engagement = engagement
+ check_list.save()
+ serialized_check_list = serializers.EngagementCheckListSerializer(
+ check_list
+ )
+ return Response(
+ serialized_check_list.data, status=status.HTTP_201_CREATED
+ )
+ prefetch_params = request.GET.get("prefetch", "").split(",")
+ prefetcher = _Prefetcher()
+ entry = check_lists.first()
+ # Get the queried object representation
+ result = serializers.EngagementCheckListSerializer(entry).data
+ prefetcher._prefetch(entry, prefetch_params)
+ result["prefetch"] = prefetcher.prefetched_data
+ return Response(result, status=status.HTTP_200_OK)
+
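The GET branch of complete_checklist feeds the optional prefetch query parameter through _Prefetcher, so related objects come back inline under a "prefetch" key. A minimal client sketch, assuming a local instance, an illustrative engagement id, and a hypothetical API token:

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    # Fetch the completed checklist for engagement 42 and prefetch its engagement.
    resp = requests.get(
        f"{BASE}/engagements/42/complete_checklist/",
        params={"prefetch": "engagement"},
        headers=HEADERS,
    )
    resp.raise_for_status()
    print(resp.json().get("prefetch", {}))
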
+ @extend_schema(
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @swagger_auto_schema(
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @action(
+ detail=True,
+ methods=["get"],
+ url_path=r"files/download/(?P\d+)",
+ )
+ def download_file(self, request, file_id, pk=None):
+ engagement = self.get_object()
+ # Get the file object
+ file_object_qs = engagement.files.filter(id=file_id)
+ file_object = (
+ file_object_qs.first() if len(file_object_qs) > 0 else None
+ )
+ if file_object is None:
+ return Response(
+ {"error": "File ID not associated with Engagement"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ # Get the path of the file in media root
+ file_path = f"{settings.MEDIA_ROOT}/{file_object.file.url.lstrip(settings.MEDIA_URL)}"
+ file_handle = open(file_path, "rb")
+ # send file
+ response = FileResponse(
+ file_handle,
+ content_type=f"{mimetypes.guess_type(file_path)}",
+ status=status.HTTP_200_OK,
+ )
+ response["Content-Length"] = file_object.file.size
+ response[
+ "Content-Disposition"
+ ] = f'attachment; filename="{file_object.file.name}"'
+
+ return response
+
+
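Both file-download actions in this change stream the stored upload back through FileResponse with an attachment Content-Disposition, so clients should treat the body as binary. A hedged retrieval sketch (host, token, and ids are placeholders):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    # Download file 7 attached to engagement 42; both ids are illustrative.
    with requests.get(
        f"{BASE}/engagements/42/files/download/7/",
        headers=HEADERS,
        stream=True,
    ) as resp:
        resp.raise_for_status()
        with open("evidence.bin", "wb") as fh:
            for chunk in resp.iter_content(chunk_size=8192):
                fh.write(chunk)
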
+class RiskAcceptanceViewSet(
+ PrefetchDojoModelViewSet,
+):
+ serializer_class = serializers.RiskAcceptanceSerializer
+ queryset = Risk_Acceptance.objects.none()
+ filter_backends = (DjangoFilterBackend,)
+ filterset_class = ApiRiskAcceptanceFilter
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["risk_acceptance_list", "risk_acceptance_read"],
+ serializers.RiskAcceptanceSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasRiskAcceptancePermission,
+ )
+
+ def get_queryset(self):
+ return (
+ get_authorized_risk_acceptances(Permissions.Risk_Acceptance)
+ .prefetch_related(
+ "notes", "engagement_set", "owner", "accepted_findings"
+ )
+ .distinct()
+ )
+
+ @extend_schema(
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.RiskAcceptanceProofSerializer,
+ },
+ )
+ @swagger_auto_schema(
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.RiskAcceptanceProofSerializer,
+ },
+ )
+ @action(detail=True, methods=["get"])
+ def download_proof(self, request, pk=None):
+ risk_acceptance = self.get_object()
+ # Get the file object
+ file_object = risk_acceptance.path
+ if file_object is None or risk_acceptance.filename() is None:
+ return Response(
+ {"error": "Proof has not provided to this risk acceptance..."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ # Get the path of the file in media root
+ file_path = f"{settings.MEDIA_ROOT}/{file_object.name}"
+ file_handle = open(file_path, "rb")
+ # send file
+ response = FileResponse(
+ file_handle,
+ content_type=f"{mimetypes.guess_type(file_path)}",
+ status=status.HTTP_200_OK,
+ )
+ response["Content-Length"] = file_object.size
+ response[
+ "Content-Disposition"
+ ] = f'attachment; filename="{risk_acceptance.filename()}"'
+
+ return response
+
# These are technologies in the UI and the API!
# Authorization: object-based
-class AppAnalysisViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class AppAnalysisViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.AppAnalysisSerializer
queryset = App_Analysis.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_class = ApiAppAnalysisFilter
- permission_classes = (IsAuthenticated, permissions.UserHasAppAnalysisPermission)
+ filterset_class = ApiAppAnalysisFilter
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["technologies_list", "technologies_read"],
+ serializers.AppAnalysisSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasAppAnalysisPermission,
+ )
def get_queryset(self):
return get_authorized_app_analysis(Permissions.Product_View)
+# Authorization: configuration
+class CredentialsViewSet(
+ PrefetchDojoModelViewSet,
+):
+ serializer_class = serializers.CredentialSerializer
+ queryset = Cred_User.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["credentials_list", "credentials_read"],
+ serializers.CredentialSerializer,
+ ).to_schema()
+ permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
+
+
+# Authorization: object-based
+class CredentialsMappingViewSet(
+ PrefetchDojoModelViewSet,
+):
+ serializer_class = serializers.CredentialMappingSerializer
+ queryset = Cred_Mapping.objects.none()
+ filter_backends = (DjangoFilterBackend,)
+ filterset_class = ApiCredentialsFilter
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["credential_mappings_list", "credential_mappings_read"],
+ serializers.CredentialMappingSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasCredentialPermission,
+ )
+
+ def get_queryset(self):
+ return get_authorized_cred_mappings(Permissions.Credential_View)
+
+
# Authorization: configuration
-class FindingTemplatesViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class FindingTemplatesViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.FindingTemplateSerializer
queryset = Finding_Template.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_class = ApiTemplateFindingFilter
- permission_classes = (permissions.UserHasConfigurationPermissionStaff, )
+ filterset_class = ApiTemplateFindingFilter
+ permission_classes = (permissions.UserHasConfigurationPermissionStaff,)
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("related_fields", OpenApiTypes.BOOL, OpenApiParameter.QUERY, required=False,
- description="Expand finding external relations (engagement, environment, product, \
- product_type, test, test_type)"),
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "related_fields",
+ OpenApiTypes.BOOL,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="Expand finding external relations (engagement, environment, product, \
+ product_type, test, test_type)",
+ ),
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "related_fields",
+ OpenApiTypes.BOOL,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="Expand finding external relations (engagement, environment, product, \
+ product_type, test, test_type)",
+ ),
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("related_fields", OpenApiTypes.BOOL, OpenApiParameter.QUERY, required=False,
- description="Expand finding external relations (engagement, environment, product, \
- product_type, test, test_type)"),
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class FindingViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- ra_api.AcceptedFindingsMixin,
- viewsets.GenericViewSet):
+class FindingViewSet(
+ prefetch.PrefetchListMixin,
+ prefetch.PrefetchRetrieveMixin,
+ mixins.UpdateModelMixin,
+ mixins.DestroyModelMixin,
+ mixins.CreateModelMixin,
+ ra_api.AcceptedFindingsMixin,
+ viewsets.GenericViewSet,
+ dojo_mixins.DeletePreviewModelMixin,
+):
serializer_class = serializers.FindingSerializer
queryset = Finding.objects.none()
filter_backends = (DjangoFilterBackend,)
filterset_class = ApiFindingFilter
- permission_classes = (IsAuthenticated, permissions.UserHasFindingPermission)
-
- _related_field_parameters = [openapi.Parameter(
- name="related_fields",
- in_=openapi.IN_QUERY,
- description="Expand finding external relations (engagement, environment, product, product_type, test, test_type)",
- type=openapi.TYPE_BOOLEAN)]
- swagger_schema = prefetch.get_prefetch_schema(["findings_list", "findings_read"], serializers.FindingSerializer). \
- composeWith(schema.ExtraParameters("findings_list", _related_field_parameters)). \
- composeWith(schema.ExtraParameters("findings_read", _related_field_parameters)). \
- to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasFindingPermission,
+ )
+
+ _related_field_parameters = [
+ openapi.Parameter(
+ name="related_fields",
+ in_=openapi.IN_QUERY,
+ description="Expand finding external relations (engagement, environment, product, product_type, test, test_type)",
+ type=openapi.TYPE_BOOLEAN,
+ )
+ ]
+ swagger_schema = (
+ prefetch.get_prefetch_schema(
+ ["findings_list", "findings_read"], serializers.FindingSerializer
+ )
+ .composeWith(
+ schema.ExtraParameters("findings_list", _related_field_parameters)
+ )
+ .composeWith(
+ schema.ExtraParameters("findings_read", _related_field_parameters)
+ )
+ .to_schema()
+ )
# Overriding mixins.UpdateModeMixin perform_update() method to grab push_to_jira
# data and add that as a parameter to .save()
def perform_update(self, serializer):
# IF JIRA is enabled and this product has a JIRA configuration
- push_to_jira = serializer.validated_data.get('push_to_jira')
+ push_to_jira = serializer.validated_data.get("push_to_jira")
jira_project = jira_helper.get_jira_project(serializer.instance)
- if get_system_setting('enable_jira') and jira_project:
+ if get_system_setting("enable_jira") and jira_project:
push_to_jira = push_to_jira or jira_project.push_all_issues
serializer.save(push_to_jira=push_to_jira)
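In effect a client may request a JIRA push on any update, and the view will still push when the project is configured with push_all_issues. A request sketch with placeholder host, token, and finding id:

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    # Update the finding and request a JIRA push in the same call.
    requests.patch(
        f"{BASE}/findings/1337/",
        json={"severity": "High", "push_to_jira": True},
        headers=HEADERS,
    ).raise_for_status()
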
def get_queryset(self):
- findings = get_authorized_findings(Permissions.Finding_View).prefetch_related('endpoints',
- 'reviewers',
- 'found_by',
- 'notes',
- 'risk_acceptance_set',
- 'test',
- 'tags',
- 'jira_issue',
- 'finding_group_set',
- 'files',
- 'burprawrequestresponse_set',
- 'endpoint_status',
- 'finding_meta',
- 'test__test_type',
- 'test__engagement',
- 'test__environment',
- 'test__engagement__product',
- 'test__engagement__product__prod_type')
+ findings = get_authorized_findings(
+ Permissions.Finding_View
+ ).prefetch_related(
+ "endpoints",
+ "reviewers",
+ "found_by",
+ "notes",
+ "risk_acceptance_set",
+ "test",
+ "tags",
+ "jira_issue",
+ "finding_group_set",
+ "files",
+ "burprawrequestresponse_set",
+ "status_finding",
+ "finding_meta",
+ "test__test_type",
+ "test__engagement",
+ "test__environment",
+ "test__engagement__product",
+ "test__engagement__product__prod_type",
+ )
return findings.distinct()
def get_serializer_class(self):
- if self.request and self.request.method == 'POST':
+ if self.request and self.request.method == "POST":
return serializers.FindingCreateSerializer
else:
return serializers.FindingSerializer
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.TagSerializer}
+ methods=["POST"],
+ request=serializers.FindingCloseSerializer,
+ responses={status.HTTP_200_OK: serializers.FindingCloseSerializer},
+ )
+ @swagger_auto_schema(
+ method="post",
+ request_body=serializers.FindingCloseSerializer,
+ responses={status.HTTP_200_OK: serializers.FindingCloseSerializer},
)
+ @action(detail=True, methods=["post"])
+ def close(self, request, pk=None):
+ finding = self.get_object()
+
+ if request.method == "POST":
+ finding_close = serializers.FindingCloseSerializer(
+ data=request.data
+ )
+ if finding_close.is_valid():
+ finding.is_mitigated = finding_close.validated_data[
+ "is_mitigated"
+ ]
+ if settings.EDITABLE_MITIGATED_DATA:
+ finding.mitigated = (
+ finding_close.validated_data["mitigated"]
+ or timezone.now()
+ )
+ else:
+ finding.mitigated = timezone.now()
+ finding.mitigated_by = request.user
+ finding.active = False
+ finding.false_p = finding_close.validated_data.get(
+ "false_p", False
+ )
+ finding.duplicate = finding_close.validated_data.get(
+ "duplicate", False
+ )
+ finding.out_of_scope = finding_close.validated_data.get(
+ "out_of_scope", False
+ )
+
+ endpoints_status = finding.status_finding.all()
+ for e_status in endpoints_status:
+ e_status.mitigated_by = request.user
+ if settings.EDITABLE_MITIGATED_DATA:
+ e_status.mitigated_time = (
+ finding_close.validated_data["mitigated"]
+ or timezone.now()
+ )
+ else:
+ e_status.mitigated_time = timezone.now()
+ e_status.mitigated = True
+ e_status.last_modified = timezone.now()
+ e_status.save()
+ finding.save()
+ else:
+ return Response(
+ finding_close.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+ serialized_finding = serializers.FindingCloseSerializer(finding)
+ return Response(serialized_finding.data)
+
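The close action mirrors the UI flow: it mitigates the finding, records the mitigating user, and cascades mitigation onto every related endpoint status. A payload sketch using the serializer fields read above (id and token are placeholders):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    payload = {
        "is_mitigated": True,
        "false_p": False,
        "duplicate": False,
        "out_of_scope": False,
    }
    requests.post(
        f"{BASE}/findings/1337/close/", json=payload, headers=HEADERS
    ).raise_for_status()
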
@extend_schema(
- methods=['POST'],
+ methods=["GET"],
+ responses={status.HTTP_200_OK: serializers.TagSerializer},
+ )
+ @extend_schema(
+ methods=["POST"],
request=serializers.TagSerializer,
- responses={status.HTTP_201_CREATED: serializers.TagSerializer}
+ responses={status.HTTP_201_CREATED: serializers.TagSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.TagSerializer}
+ method="get", responses={status.HTTP_200_OK: serializers.TagSerializer}
)
@swagger_auto_schema(
- method='post',
+ method="post",
request_body=serializers.TagSerializer,
- responses={status.HTTP_200_OK: serializers.TagSerializer}
+ responses={status.HTTP_200_OK: serializers.TagSerializer},
)
- @action(detail=True, methods=['get', 'post'])
+ @action(detail=True, methods=["get", "post"])
def tags(self, request, pk=None):
finding = self.get_object()
- if request.method == 'POST':
+ if request.method == "POST":
new_tags = serializers.TagSerializer(data=request.data)
if new_tags.is_valid():
all_tags = finding.tags
- all_tags = serializers.TagSerializer({"tags": all_tags}).data['tags']
+ all_tags = serializers.TagSerializer({"tags": all_tags}).data[
+ "tags"
+ ]
- for tag in tagulous.utils.parse_tags(new_tags.validated_data['tags']):
+ for tag in tagulous.utils.parse_tags(
+ new_tags.validated_data["tags"]
+ ):
if tag not in all_tags:
all_tags.append(tag)
new_tags = tagulous.utils.render_tags(all_tags)
finding.tags = new_tags
finding.save()
else:
- return Response(new_tags.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_tags.errors, status=status.HTTP_400_BAD_REQUEST
+ )
tags = finding.tags
serialized_tags = serializers.TagSerializer({"tags": tags})
return Response(serialized_tags.data)
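Note the merge semantics: posted tags are parsed by tagulous and appended to the finding's existing tags rather than replacing them. A sketch, assuming the tag field accepts a JSON list of strings (placeholders as above):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    # Adds two tags to whatever the finding already carries.
    requests.post(
        f"{BASE}/findings/1337/tags/",
        json={"tags": ["sql-injection", "triaged"]},
        headers=HEADERS,
    ).raise_for_status()
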
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer}
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer
+ },
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.BurpRawRequestResponseSerializer,
- responses={status.HTTP_201_CREATED: serializers.BurpRawRequestResponseSerializer}
+ responses={
+ status.HTTP_201_CREATED: serializers.BurpRawRequestResponseSerializer
+ },
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer}
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer
+ },
)
@swagger_auto_schema(
- method='post',
+ method="post",
request_body=serializers.BurpRawRequestResponseSerializer,
- responses={status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer}
+ responses={
+ status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer
+ },
)
- @action(detail=True, methods=['get', 'post'])
+ @action(detail=True, methods=["get", "post"])
def request_response(self, request, pk=None):
finding = self.get_object()
- if request.method == 'POST':
- burps = serializers.BurpRawRequestResponseSerializer(data=request.data, many=isinstance(request.data, list))
+ if request.method == "POST":
+ burps = serializers.BurpRawRequestResponseSerializer(
+ data=request.data, many=isinstance(request.data, list)
+ )
if burps.is_valid():
- for pair in burps.validated_data['req_resp']:
+ for pair in burps.validated_data["req_resp"]:
burp_rr = BurpRawRequestResponse(
- finding=finding,
- burpRequestBase64=base64.b64encode(pair["request"].encode("utf-8")),
- burpResponseBase64=base64.b64encode(pair["response"].encode("utf-8")),
- )
+ finding=finding,
+ burpRequestBase64=base64.b64encode(
+ pair["request"].encode("utf-8")
+ ),
+ burpResponseBase64=base64.b64encode(
+ pair["response"].encode("utf-8")
+ ),
+ )
burp_rr.clean()
burp_rr.save()
else:
- return Response(burps.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ burps.errors, status=status.HTTP_400_BAD_REQUEST
+ )
burp_req_resp = BurpRawRequestResponse.objects.filter(finding=finding)
burp_list = []
for burp in burp_req_resp:
request = burp.get_request()
response = burp.get_response()
- burp_list.append({'request': request, 'response': response})
- serialized_burps = serializers.BurpRawRequestResponseSerializer({'req_resp': burp_list})
+ burp_list.append({"request": request, "response": response})
+ serialized_burps = serializers.BurpRawRequestResponseSerializer(
+ {"req_resp": burp_list}
+ )
return Response(serialized_burps.data)
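The POST body is a list of raw request/response pairs; the view base64-encodes each side before persisting it as a BurpRawRequestResponse. A minimal payload sketch (placeholders as above):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    payload = {
        "req_resp": [
            {
                "request": "GET /login HTTP/1.1\r\nHost: example.com\r\n\r\n",
                "response": "HTTP/1.1 200 OK\r\n\r\n<html>...</html>",
            }
        ]
    }
    requests.post(
        f"{BASE}/findings/1337/request_response/", json=payload, headers=HEADERS
    ).raise_for_status()
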
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.FindingToNotesSerializer}
+ methods=["GET"],
+ responses={status.HTTP_200_OK: serializers.FindingToNotesSerializer},
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.FindingToNotesSerializer}
+ method="get",
+ responses={status.HTTP_200_OK: serializers.FindingToNotesSerializer},
)
@swagger_auto_schema(
- methods=['post'],
+ methods=["post"],
request_body=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@action(detail=True, methods=["get", "post"])
def notes(self, request, pk=None):
finding = self.get_object()
- if request.method == 'POST':
- new_note = serializers.AddNewNoteOptionSerializer(data=request.data)
+ if request.method == "POST":
+ new_note = serializers.AddNewNoteOptionSerializer(
+ data=request.data
+ )
if new_note.is_valid():
- entry = new_note.validated_data['entry']
- private = new_note.validated_data.get('private', False)
- note_type = new_note.validated_data.get('note_type', None)
+ entry = new_note.validated_data["entry"]
+ private = new_note.validated_data.get("private", False)
+ note_type = new_note.validated_data.get("note_type", None)
else:
- return Response(new_note.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_note.errors, status=status.HTTP_400_BAD_REQUEST
+ )
author = request.user
- note = Notes(entry=entry, author=author, private=private, note_type=note_type)
+ note = Notes(
+ entry=entry,
+ author=author,
+ private=private,
+ note_type=note_type,
+ )
note.save()
finding.notes.add(note)
@@ -642,146 +1217,230 @@ def notes(self, request, pk=None):
elif finding.has_jira_group_issue:
jira_helper.add_comment(finding.finding_group, note)
- serialized_note = serializers.NoteSerializer({
- "author": author, "entry": entry,
- "private": private
- })
- result = serializers.FindingToNotesSerializer({
- "finding_id": finding, "notes": [serialized_note.data]
- })
- return Response(serialized_note.data,
- status=status.HTTP_201_CREATED)
+ serialized_note = serializers.NoteSerializer(
+ {"author": author, "entry": entry, "private": private}
+ )
+ return Response(
+ serialized_note.data, status=status.HTTP_201_CREATED
+ )
notes = finding.notes.all()
- serialized_notes = serializers.FindingToNotesSerializer({
- "finding_id": finding, "notes": notes
- })
- return Response(serialized_notes.data,
- status=status.HTTP_200_OK)
+ serialized_notes = serializers.FindingToNotesSerializer(
+ {"finding_id": finding, "notes": notes}
+ )
+ return Response(serialized_notes.data, status=status.HTTP_200_OK)
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.FindingToFilesSerializer}
+ methods=["GET"],
+ responses={status.HTTP_200_OK: serializers.FindingToFilesSerializer},
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.FindingToFilesSerializer}
+ method="get",
+ responses={status.HTTP_200_OK: serializers.FindingToFilesSerializer},
)
@swagger_auto_schema(
- method='post',
+ method="post",
request_body=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
+ )
+ @action(
+ detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,)
)
- @action(detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,))
def files(self, request, pk=None):
finding = self.get_object()
- if request.method == 'POST':
+ if request.method == "POST":
new_file = serializers.FileSerializer(data=request.data)
if new_file.is_valid():
- title = new_file.validated_data['title']
- file = new_file.validated_data['file']
+ title = new_file.validated_data["title"]
+ file = new_file.validated_data["file"]
else:
- return Response(new_file.errors, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_file.errors, status=status.HTTP_400_BAD_REQUEST
+ )
file = FileUpload(title=title, file=file)
file.save()
finding.files.add(file)
serialized_file = serializers.FileSerializer(file)
- return Response(serialized_file.data, status=status.HTTP_201_CREATED)
+ return Response(
+ serialized_file.data, status=status.HTTP_201_CREATED
+ )
files = finding.files.all()
- serialized_files = serializers.FindingToFilesSerializer({
- "finding_id": finding, "files": files
- })
+ serialized_files = serializers.FindingToFilesSerializer(
+ {"finding_id": finding, "files": files}
+ )
return Response(serialized_files.data, status=status.HTTP_200_OK)
+ @extend_schema(
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @swagger_auto_schema(
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @action(
+ detail=True,
+ methods=["get"],
+ url_path=r"files/download/(?P\d+)",
+ )
+ def download_file(self, request, file_id, pk=None):
+ finding = self.get_object()
+ # Get the file object
+ file_object_qs = finding.files.filter(id=file_id)
+ file_object = (
+ file_object_qs.first() if len(file_object_qs) > 0 else None
+ )
+ if file_object is None:
+ return Response(
+ {"error": "File ID not associated with Finding"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ # Get the path of the file in media root
+ file_path = f"{settings.MEDIA_ROOT}/{file_object.file.url.lstrip(settings.MEDIA_URL)}"
+ file_handle = open(file_path, "rb")
+ # send file
+ response = FileResponse(
+ file_handle,
+ content_type=f"{mimetypes.guess_type(file_path)}",
+ status=status.HTTP_200_OK,
+ )
+ response["Content-Length"] = file_object.file.size
+ response[
+ "Content-Disposition"
+ ] = f'attachment; filename="{file_object.file.name}"'
+
+ return response
+
@extend_schema(
request=serializers.FindingNoteSerializer,
- responses={status.HTTP_204_NO_CONTENT: ""}
+ responses={status.HTTP_204_NO_CONTENT: ""},
)
@swagger_auto_schema(
request_body=serializers.FindingNoteSerializer,
- responses={status.HTTP_204_NO_CONTENT: ""}
+ responses={status.HTTP_204_NO_CONTENT: ""},
)
@action(detail=True, methods=["patch"])
def remove_note(self, request, pk=None):
"""Remove Note From Finding Note"""
finding = self.get_object()
notes = finding.notes.all()
- if request.data['note_id']:
- note = get_object_or_404(Notes.objects, id=request.data['note_id'])
+ if request.data["note_id"]:
+ note = get_object_or_404(Notes.objects, id=request.data["note_id"])
if note not in notes:
- return Response({"error": "Selected Note is not assigned to this Finding"},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {"error": "Selected Note is not assigned to this Finding"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
else:
- return Response({"error": "('note_id') parameter missing"},
- status=status.HTTP_400_BAD_REQUEST)
- if note.author.username == request.user.username or request.user.is_staff:
+ return Response(
+ {"error": "('note_id') parameter missing"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ if (
+ note.author.username == request.user.username
+ or request.user.is_superuser
+ ):
finding.notes.remove(note)
note.delete()
else:
- return Response({"error": "Delete Failed, You are not the Note's author"},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {"error": "Delete Failed, You are not the Note's author"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- return Response({"Success": "Selected Note has been Removed successfully"},
- status=status.HTTP_204_NO_CONTENT)
+ return Response(
+ {"Success": "Selected Note has been Removed successfully"},
+ status=status.HTTP_204_NO_CONTENT,
+ )
@extend_schema(
- methods=['PUT', 'PATCH'],
+ methods=["PUT", "PATCH"],
request=serializers.TagSerializer,
responses={status.HTTP_204_NO_CONTENT: ""},
)
@swagger_auto_schema(
- methods=['put', 'patch'],
+ methods=["put", "patch"],
request_body=serializers.TagSerializer,
responses={status.HTTP_204_NO_CONTENT: ""},
)
@action(detail=True, methods=["put", "patch"])
def remove_tags(self, request, pk=None):
- """ Remove Tag(s) from finding list of tags """
+ """Remove Tag(s) from finding list of tags"""
finding = self.get_object()
delete_tags = serializers.TagSerializer(data=request.data)
if delete_tags.is_valid():
all_tags = finding.tags
- all_tags = serializers.TagSerializer({"tags": all_tags}).data['tags']
+ all_tags = serializers.TagSerializer({"tags": all_tags}).data[
+ "tags"
+ ]
# serializer turns it into a string, but we need a list
- del_tags = tagulous.utils.parse_tags(delete_tags.validated_data['tags'])
+ del_tags = tagulous.utils.parse_tags(
+ delete_tags.validated_data["tags"]
+ )
if len(del_tags) < 1:
- return Response({"error": "Empty Tag List Not Allowed"},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {"error": "Empty Tag List Not Allowed"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
for tag in del_tags:
if tag not in all_tags:
- return Response({"error": "'{}' is not a valid tag in list".format(tag)},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {
+ "error": "'{}' is not a valid tag in list".format(
+ tag
+ )
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
all_tags.remove(tag)
new_tags = tagulous.utils.render_tags(all_tags)
finding.tags = new_tags
finding.save()
- return Response({"success": "Tag(s) Removed"},
- status=status.HTTP_204_NO_CONTENT)
+ return Response(
+ {"success": "Tag(s) Removed"},
+ status=status.HTTP_204_NO_CONTENT,
+ )
else:
- return Response(delete_tags.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ delete_tags.errors, status=status.HTTP_400_BAD_REQUEST
+ )
@extend_schema(
- responses={status.HTTP_200_OK: serializers.FindingSerializer(many=True)}
+ responses={
+ status.HTTP_200_OK: serializers.FindingSerializer(many=True)
+ }
)
@swagger_auto_schema(
- responses={status.HTTP_200_OK: serializers.FindingSerializer(many=True)}
+ responses={
+ status.HTTP_200_OK: serializers.FindingSerializer(many=True)
+ }
+ )
+ @action(
+ detail=True,
+ methods=["get"],
+ url_path=r"duplicate",
+ filter_backends=[],
+ pagination_class=None,
)
- @action(detail=True, methods=['get'], url_path=r'duplicate', filter_backends=[], pagination_class=None)
def get_duplicate_cluster(self, request, pk):
finding = self.get_object()
result = duplicate_cluster(request, finding)
- serializer = serializers.FindingSerializer(instance=result, many=True,
- context={"request": request})
+ serializer = serializers.FindingSerializer(
+ instance=result, many=True, context={"request": request}
+ )
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema(
@@ -792,10 +1451,11 @@ def get_duplicate_cluster(self, request, pk):
request_body=no_body,
responses={status.HTTP_204_NO_CONTENT: ""},
)
- @action(detail=True, methods=['post'], url_path=r'duplicate/reset')
+ @action(detail=True, methods=["post"], url_path=r"duplicate/reset")
def reset_finding_duplicate_status(self, request, pk):
- finding = self.get_object()
- checked_duplicate_id = reset_finding_duplicate_status_internal(request.user, pk)
+ checked_duplicate_id = reset_finding_duplicate_status_internal(
+ request.user, pk
+ )
if checked_duplicate_id is None:
return Response(status=status.HTTP_400_BAD_REQUEST)
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -803,17 +1463,19 @@ def reset_finding_duplicate_status(self, request, pk):
@extend_schema(
request=OpenApiTypes.NONE,
parameters=[
- OpenApiParameter("new_fid", OpenApiTypes.INT, OpenApiParameter.PATH)
+ OpenApiParameter(
+ "new_fid", OpenApiTypes.INT, OpenApiParameter.PATH
+ )
],
responses={status.HTTP_204_NO_CONTENT: ""},
)
@swagger_auto_schema(
- responses={status.HTTP_204_NO_CONTENT: ""},
- request_body=no_body
+ responses={status.HTTP_204_NO_CONTENT: ""}, request_body=no_body
+ )
+ @action(
+ detail=True, methods=["post"], url_path=r"original/(?P\d+)"
)
- @action(detail=True, methods=['post'], url_path=r'original/(?P\d+)')
def set_finding_as_original(self, request, pk, new_fid):
- finding = self.get_object()
success = set_finding_as_original_internal(request.user, pk, new_fid)
if not success:
return Response(status=status.HTTP_400_BAD_REQUEST)
@@ -827,20 +1489,33 @@ def set_finding_as_original(self, request, pk, new_fid):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=False, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=False, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request):
findings = self.get_queryset()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, findings, options)
report = serializers.ReportGenerateSerializer(data)
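All four report options are booleans on ReportGenerateOptionSerializer, so a client only toggles the sections it wants included. A sketch (placeholders as above):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    options = {
        "include_finding_notes": True,
        "include_finding_images": False,
        "include_executive_summary": True,
        "include_table_of_contents": True,
    }
    requests.post(
        f"{BASE}/findings/generate_report/", json=options, headers=HEADERS
    ).raise_for_status()
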
@@ -848,27 +1523,34 @@ def generate_report(self, request):
def _get_metadata(self, request, finding):
metadata = DojoMeta.objects.filter(finding=finding)
- serializer = serializers.FindingMetaSerializer(instance=metadata, many=True)
+ serializer = serializers.FindingMetaSerializer(
+ instance=metadata, many=True
+ )
return Response(serializer.data, status=status.HTTP_200_OK)
def _edit_metadata(self, request, finding):
metadata_name = request.query_params.get("name", None)
if metadata_name is None:
- return Response("Metadata name is required", status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "Metadata name is required", status=status.HTTP_400_BAD_REQUEST
+ )
try:
DojoMeta.objects.update_or_create(
- name=metadata_name, finding=finding,
+ name=metadata_name,
+ finding=finding,
defaults={
"name": request.data.get("name"),
- "value": request.data.get("value")
- }
+ "value": request.data.get("value"),
+ },
)
return Response(data=request.data, status=status.HTTP_200_OK)
except IntegrityError:
- return Response("Update failed because the new name already exists",
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "Update failed because the new name already exists",
+ status=status.HTTP_400_BAD_REQUEST,
+ )
def _add_metadata(self, request, finding):
metadata_data = serializers.FindingMetaSerializer(data=request.data)
@@ -882,41 +1564,63 @@ def _add_metadata(self, request, finding):
metadata.validate_unique()
metadata.save()
except ValidationError:
- return Response("Create failed probably because the name of the metadata already exists", status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "Create failed probably because the name of the metadata already exists",
+ status=status.HTTP_400_BAD_REQUEST,
+ )
return Response(data=metadata_data.data, status=status.HTTP_200_OK)
else:
- return Response(metadata_data.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ metadata_data.errors, status=status.HTTP_400_BAD_REQUEST
+ )
def _remove_metadata(self, request, finding):
name = request.query_params.get("name", None)
if name is None:
- return Response("A metadata name must be provided", status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "A metadata name must be provided",
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- metadata = get_object_or_404(DojoMeta.objects, finding=finding, name=name)
+ metadata = get_object_or_404(
+ DojoMeta.objects, finding=finding, name=name
+ )
metadata.delete()
return Response("Metadata deleted", status=status.HTTP_200_OK)
@extend_schema(
- methods=['GET'],
+ methods=["GET"],
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer(many=True),
- status.HTTP_404_NOT_FOUND: OpenApiResponse(description="Returned if finding does not exist"),
+ status.HTTP_404_NOT_FOUND: OpenApiResponse(
+ description="Returned if finding does not exist"
+ ),
},
)
@extend_schema(
- methods=['DELETE'],
+ methods=["DELETE"],
parameters=[
- OpenApiParameter("name", OpenApiTypes.INT, OpenApiParameter.QUERY, required=True,
- description="name of the metadata to retrieve. If name is empty, return all the \
- metadata associated with the finding")
+ OpenApiParameter(
+ "name",
+ OpenApiTypes.INT,
+ OpenApiParameter.QUERY,
+ required=True,
+ description="name of the metadata to retrieve. If name is empty, return all the \
+ metadata associated with the finding",
+ )
],
responses={
- status.HTTP_200_OK: OpenApiResponse(description="Returned if the metadata was correctly deleted"),
- status.HTTP_404_NOT_FOUND: OpenApiResponse(description="Returned if finding does not exist"),
- status.HTTP_400_BAD_REQUEST: OpenApiResponse(description="Returned if there was a problem with the metadata information"),
+ status.HTTP_200_OK: OpenApiResponse(
+ description="Returned if the metadata was correctly deleted"
+ ),
+ status.HTTP_404_NOT_FOUND: OpenApiResponse(
+ description="Returned if finding does not exist"
+ ),
+ status.HTTP_400_BAD_REQUEST: OpenApiResponse(
+ description="Returned if there was a problem with the metadata information"
+ ),
},
# manual_parameters=[openapi.Parameter(
# name="name", in_=openapi.IN_QUERY, type=openapi.TYPE_STRING,
@@ -924,68 +1628,92 @@ def _remove_metadata(self, request, finding):
# metadata associated with the finding")]
)
@extend_schema(
- methods=['PUT'],
+ methods=["PUT"],
request=serializers.FindingMetaSerializer,
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer,
- status.HTTP_404_NOT_FOUND: OpenApiResponse(description="Returned if finding does not exist"),
- status.HTTP_400_BAD_REQUEST: OpenApiResponse(description="Returned if there was a problem with the metadata information"),
+ status.HTTP_404_NOT_FOUND: OpenApiResponse(
+ description="Returned if finding does not exist"
+ ),
+ status.HTTP_400_BAD_REQUEST: OpenApiResponse(
+ description="Returned if there was a problem with the metadata information"
+ ),
},
# manual_parameters=[openapi.Parameter(
# name="name", in_=openapi.IN_QUERY, required=True, type=openapi.TYPE_STRING,
# description="name of the metadata to edit")],
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.FindingMetaSerializer,
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer,
- status.HTTP_404_NOT_FOUND: OpenApiResponse(description="Returned if finding does not exist"),
- status.HTTP_400_BAD_REQUEST: OpenApiResponse(description="Returned if there was a problem with the metadata information"),
+ status.HTTP_404_NOT_FOUND: OpenApiResponse(
+ description="Returned if finding does not exist"
+ ),
+ status.HTTP_400_BAD_REQUEST: OpenApiResponse(
+ description="Returned if there was a problem with the metadata information"
+ ),
},
)
@swagger_auto_schema(
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer(many=True),
- status.HTTP_404_NOT_FOUND: "Returned if finding does not exist"
+ status.HTTP_404_NOT_FOUND: "Returned if finding does not exist",
},
- methods=['get']
+ methods=["get"],
)
@swagger_auto_schema(
responses={
status.HTTP_200_OK: "Returned if the metadata was correctly deleted",
status.HTTP_404_NOT_FOUND: "Returned if finding does not exist",
- status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information"
+ status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information",
},
- methods=['delete'],
- manual_parameters=[openapi.Parameter(
- name="name", in_=openapi.IN_QUERY, required=True, type=openapi.TYPE_STRING,
- description="name of the metadata to retrieve. If name is empty, return all the \
- metadata associated with the finding")]
+ methods=["delete"],
+ manual_parameters=[
+ openapi.Parameter(
+ name="name",
+ in_=openapi.IN_QUERY,
+ required=True,
+ type=openapi.TYPE_STRING,
+ description="name of the metadata to retrieve. If name is empty, return all the \
+ metadata associated with the finding",
+ )
+ ],
)
@swagger_auto_schema(
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer,
status.HTTP_404_NOT_FOUND: "Returned if finding does not exist",
- status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information"
+ status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information",
},
- methods=['put'],
- manual_parameters=[openapi.Parameter(
- name="name", in_=openapi.IN_QUERY, required=True, type=openapi.TYPE_STRING,
- description="name of the metadata to edit")],
- request_body=serializers.FindingMetaSerializer
+ methods=["put"],
+ manual_parameters=[
+ openapi.Parameter(
+ name="name",
+ in_=openapi.IN_QUERY,
+ required=True,
+ type=openapi.TYPE_STRING,
+ description="name of the metadata to edit",
+ )
+ ],
+ request_body=serializers.FindingMetaSerializer,
)
@swagger_auto_schema(
responses={
status.HTTP_200_OK: serializers.FindingMetaSerializer,
status.HTTP_404_NOT_FOUND: "Returned if finding does not exist",
- status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information"
+ status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information",
},
- methods=['post'],
- request_body=serializers.FindingMetaSerializer
+ methods=["post"],
+ request_body=serializers.FindingMetaSerializer,
+ )
+ @action(
+ detail=True,
+ methods=["post", "put", "delete", "get"],
+ filter_backends=[],
+ pagination_class=None,
)
- @action(detail=True, methods=["post", "put", "delete", "get"],
- filter_backends=[], pagination_class=None)
def metadata(self, request, pk=None):
finding = self.get_object()
@@ -1000,134 +1728,188 @@ def metadata(self, request, pk=None):
elif request.method == "DELETE":
return self._remove_metadata(request, finding)
- return Response({"error", "unsupported method"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {"error": "unsupported method"}, status=status.HTTP_400_BAD_REQUEST
+ )
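Taken together, the helpers above give the metadata action full create/read/update/delete over DojoMeta entries keyed by name. A usage sketch (placeholders as above):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token
    url = f"{BASE}/findings/1337/metadata/"

    # Create an entry, update it via the "name" query parameter, then delete it.
    requests.post(url, json={"name": "scanner", "value": "zap"}, headers=HEADERS)
    requests.put(
        url,
        params={"name": "scanner"},
        json={"name": "scanner", "value": "zap-2.14"},
        headers=HEADERS,
    )
    requests.delete(url, params={"name": "scanner"}, headers=HEADERS)
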
-# Authorization: superuser
-class JiraInstanceViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: configuration
+class JiraInstanceViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.JIRAInstanceSerializer
queryset = JIRA_Instance.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'url')
- permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
+ filterset_fields = ["id", "url"]
+ permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,)
# Authorization: object-based
-class JiraIssuesViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class JiraIssuesViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.JIRAIssueSerializer
queryset = JIRA_Issue.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'jira_id', 'jira_key', 'finding', 'engagement', 'finding_group')
- permission_classes = (IsAuthenticated, permissions.UserHasJiraIssuePermission)
+ filterset_fields = [
+ "id",
+ "jira_id",
+ "jira_key",
+ "finding",
+ "engagement",
+ "finding_group",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["jira_finding_mappings_list", "jira_finding_mappings_read"],
+ serializers.JIRAIssueSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasJiraIssuePermission,
+ )
def get_queryset(self):
return get_authorized_jira_issues(Permissions.Product_View)
# Authorization: object-based
-class JiraProjectViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class JiraProjectViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.JIRAProjectSerializer
queryset = JIRA_Project.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'jira_instance', 'product', 'engagement', 'component', 'project_key',
- 'push_all_issues', 'enable_engagement_epic_mapping',
- 'push_notes')
- permission_classes = (IsAuthenticated, permissions.UserHasJiraProductPermission)
+ filterset_fields = [
+ "id",
+ "jira_instance",
+ "product",
+ "engagement",
+ "component",
+ "project_key",
+ "push_all_issues",
+ "enable_engagement_epic_mapping",
+ "push_notes",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["jira_projects_list", "jira_projects_read"],
+ serializers.JIRAProjectSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasJiraProductPermission,
+ )
def get_queryset(self):
return get_authorized_jira_projects(Permissions.Product_View)
# Authorization: superuser
-class SonarqubeIssueViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class SonarqubeIssueViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.SonarqubeIssueSerializer
queryset = Sonarqube_Issue.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'key', 'status', 'type')
+ filterset_fields = ["id", "key", "status", "type"]
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
# Authorization: superuser
-class SonarqubeIssueTransitionViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class SonarqubeIssueTransitionViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.SonarqubeIssueTransitionSerializer
queryset = Sonarqube_Issue_Transition.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'sonarqube_issue', 'finding_status',
- 'sonarqube_status', 'transitions')
+ filterset_fields = [
+ "id",
+ "sonarqube_issue",
+ "finding_status",
+ "sonarqube_status",
+ "transitions",
+ ]
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
# Authorization: object-based
-class ProductAPIScanConfigurationViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class ProductAPIScanConfigurationViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductAPIScanConfigurationSerializer
queryset = Product_API_Scan_Configuration.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product', 'tool_configuration',
- 'service_key_1', 'service_key_2', 'service_key_3')
- permission_classes = (IsAuthenticated, permissions.UserHasProductAPIScanConfigurationPermission)
+ filterset_fields = [
+ "id",
+ "product",
+ "tool_configuration",
+ "service_key_1",
+ "service_key_2",
+ "service_key_3",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ [
+ "product_api_scan_configurations_list",
+ "product_api_scan_configurations_read",
+ ],
+ serializers.ProductAPIScanConfigurationSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductAPIScanConfigurationPermission,
+ )
def get_queryset(self):
- return get_authorized_product_api_scan_configurations(Permissions.Product_API_Scan_Configuration_View)
+ return get_authorized_product_api_scan_configurations(
+ Permissions.Product_API_Scan_Configuration_View
+ )
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class DojoMetaViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class DojoMetaViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.MetaSerializer
queryset = DojoMeta.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product', 'endpoint', 'finding', 'name', 'value')
- permission_classes = (IsAuthenticated, permissions.UserHasDojoMetaPermission)
- swagger_schema = prefetch.get_prefetch_schema(["metadata_list", "metadata_read"],
- serializers.MetaSerializer).to_schema()
+ filterset_fields = [
+ "id",
+ "product",
+ "endpoint",
+ "finding",
+ "name",
+ "value",
+ ]
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasDojoMetaPermission,
+ )
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["metadata_list", "metadata_read"], serializers.MetaSerializer
+ ).to_schema()
def get_queryset(self):
return get_authorized_dojo_meta(Permissions.Product_View)
@@ -1137,10 +1919,16 @@ def get_queryset(self):
class DjangoFilterDescriptionInspector(CoreAPICompatInspector):
def get_filter_parameters(self, filter_backend):
if isinstance(filter_backend, DjangoFilterBackend):
- result = super(DjangoFilterDescriptionInspector, self).get_filter_parameters(filter_backend)
+ result = super(
+ DjangoFilterDescriptionInspector, self
+ ).get_filter_parameters(filter_backend)
for param in result:
- if not param.get('description', ''):
- param.description = "Filter the returned list by {field_name}".format(field_name=param.name)
+ if not param.get("description", ""):
+ param.description = (
+ "Filter the returned list by {field_name}".format(
+ field_name=param.name
+ )
+ )
return result
@@ -1148,39 +1936,70 @@ def get_filter_parameters(self, filter_backend):
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-@method_decorator(name='list', decorator=swagger_auto_schema(
- filter_inspectors=[DjangoFilterDescriptionInspector]
-))
-class ProductViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+@method_decorator(
+ name="list",
+ decorator=swagger_auto_schema(
+ filter_inspectors=[DjangoFilterDescriptionInspector]
+ ),
+)
+class ProductViewSet(
+ prefetch.PrefetchListMixin,
+ prefetch.PrefetchRetrieveMixin,
+ mixins.CreateModelMixin,
+ mixins.DestroyModelMixin,
+ mixins.UpdateModelMixin,
+ viewsets.GenericViewSet,
+ dojo_mixins.DeletePreviewModelMixin,
+):
serializer_class = serializers.ProductSerializer
# TODO: prefetch
queryset = Product.objects.none()
filter_backends = (DjangoFilterBackend,)
filterset_class = ApiProductFilter
- swagger_schema = prefetch.get_prefetch_schema(["products_list", "products_read"], serializers.ProductSerializer). \
- to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductPermission)
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["products_list", "products_read"], serializers.ProductSerializer
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductPermission,
+ )
def get_queryset(self):
return get_authorized_products(Permissions.Product_View).distinct()
+ def destroy(self, request, *args, **kwargs):
+ instance = self.get_object()
+ if get_setting("ASYNC_OBJECT_DELETE"):
+ async_del = async_delete()
+ async_del.delete(instance)
+ else:
+ instance.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
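Product deletion can cascade across a large object graph, so destroy hands the instance to the async_delete task when the ASYNC_OBJECT_DELETE setting is enabled, and returns 204 immediately either way. A client-side sketch (placeholders as above):

    import requests

    BASE = "http://localhost:8080/api/v2"  # placeholder host
    HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

    # With ASYNC_OBJECT_DELETE enabled the cascade runs in a background task,
    # but the API answers 204 No Content at once in both modes.
    resp = requests.delete(f"{BASE}/products/5/", headers=HEADERS)
    assert resp.status_code == 204
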
# def list(self, request):
# print(vars(request))
# # Note the use of `get_queryset()` instead of `self.queryset`
@@ -1196,21 +2015,34 @@ def get_queryset(self):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=True, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=True, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request, pk=None):
product = self.get_object()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, product, options)
report = serializers.ReportGenerateSerializer(data)
@@ -1219,140 +2051,194 @@ def generate_report(self, request, pk=None):
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class ProductMemberViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ProductMemberViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductMemberSerializer
queryset = Product_Member.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product_id', 'user_id')
- swagger_schema = prefetch.get_prefetch_schema(["product_members_list", "product_members_read"],
- serializers.ProductMemberSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductMemberPermission)
+ filterset_fields = ["id", "product_id", "user_id"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["product_members_list", "product_members_read"],
+ serializers.ProductMemberSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductMemberPermission,
+ )
def get_queryset(self):
- return get_authorized_product_members(Permissions.Product_View).distinct()
+ return get_authorized_product_members(
+ Permissions.Product_View
+ ).distinct()
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ exclude=True
)
@swagger_auto_schema(
- request_body=no_body,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ auto_schema=None
)
def partial_update(self, request, pk=None):
# Object authorization won't work if not all data is provided
- response = {'message': 'Patch function is not offered in this path.'}
+ response = {"message": "Patch function is not offered in this path."}
return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED)
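ProductMemberViewSet (and its group and product-type siblings below) deliberately rejects PATCH, since object-level authorization can only be evaluated against a full payload. Note the dual hiding of the method from both schema generators in use: extend_schema(exclude=True) for drf-spectacular and swagger_auto_schema(auto_schema=None) for drf-yasg. The pattern in isolation (a sketch of a viewset method, not new DefectDojo code):

    from drf_spectacular.utils import extend_schema
    from drf_yasg.utils import swagger_auto_schema
    from rest_framework import status
    from rest_framework.response import Response

    @extend_schema(exclude=True)            # hide from the OpenAPI 3 schema
    @swagger_auto_schema(auto_schema=None)  # hide from the swagger 2 schema
    def partial_update(self, request, pk=None):
        # Full updates only: PUT carries every field the authz check needs.
        return Response(
            {"message": "Patch function is not offered in this path."},
            status=status.HTTP_405_METHOD_NOT_ALLOWED,
        )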
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class ProductGroupViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ProductGroupViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductGroupSerializer
queryset = Product_Group.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product_id', 'group_id')
- swagger_schema = prefetch.get_prefetch_schema(["product_groups_list", "product_groups_read"],
- serializers.ProductGroupSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductGroupPermission)
+ filterset_fields = ["id", "product_id", "group_id"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["product_groups_list", "product_groups_read"],
+ serializers.ProductGroupSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductGroupPermission,
+ )
def get_queryset(self):
- return get_authorized_product_groups(Permissions.Product_Group_View).distinct()
+ return get_authorized_product_groups(
+ Permissions.Product_Group_View
+ ).distinct()
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ exclude=True
)
@swagger_auto_schema(
- request_body=no_body,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ auto_schema=None
)
def partial_update(self, request, pk=None):
# Object authorization won't work if not all data is provided
- response = {'message': 'Patch function is not offered in this path.'}
+ response = {"message": "Patch function is not offered in this path."}
return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED)
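The prefetch parameter declared in the extend_schema_view blocks is a response-shaping knob: for the fields named in it, list and retrieve responses additionally inline the referenced model instances instead of returning bare foreign-key ids. An illustrative client call (host, token, and the exact payload shape are assumptions for this sketch):

    import requests

    resp = requests.get(
        "https://defectdojo.example.com/api/v2/product_members/",
        params={"prefetch": "user,product"},
        headers={"Authorization": "Token REDACTED"},
    )
    # Per DefectDojo's prefetch mixins, the body gains a "prefetch" object
    # keyed by field name, mapping ids to serialized related instances.
    print(resp.json().get("prefetch", {}).keys())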
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class ProductTypeViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class ProductTypeViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductTypeSerializer
queryset = Product_Type.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'critical_product', 'key_product', 'created', 'updated')
- swagger_schema = prefetch.get_prefetch_schema(["product_types_list", "product_types_read"],
- serializers.ProductTypeSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductTypePermission)
+ filterset_fields = [
+ "id",
+ "name",
+ "critical_product",
+ "key_product",
+ "created",
+ "updated",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["product_types_list", "product_types_read"],
+ serializers.ProductTypeSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductTypePermission,
+ )
def get_queryset(self):
- return get_authorized_product_types(Permissions.Product_Type_View).distinct()
+ return get_authorized_product_types(
+ Permissions.Product_Type_View
+ ).distinct()
# Override perform_create of CreateModelMixin to add current user as owner
def perform_create(self, serializer):
serializer.save()
product_type_data = serializer.data
- product_type_data.pop('authorization_groups')
- product_type_data.pop('members')
+ product_type_data.pop("authorization_groups")
+ product_type_data.pop("members")
member = Product_Type_Member()
member.user = self.request.user
member.product_type = Product_Type(**product_type_data)
member.role = Role.objects.get(is_owner=True)
member.save()
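perform_create first pops the two relation fields off the serialized payload (they cannot be passed to the Product_Type constructor), then grants the requesting user the owner role on the product type they just created. The invariant this establishes, restated as a small predicate (sketch; helper name is illustrative):

    from dojo.models import Product_Type_Member

    def creator_is_owner(user, product_type) -> bool:
        # True for every product type right after its creating POST,
        # because perform_create saved an owner membership for the caller.
        return Product_Type_Member.objects.filter(
            user=user, product_type=product_type, role__is_owner=True
        ).exists()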
+ def destroy(self, request, *args, **kwargs):
+ instance = self.get_object()
+ if get_setting("ASYNC_OBJECT_DELETE"):
+ async_del = async_delete()
+ async_del.delete(instance)
+ else:
+ instance.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
@extend_schema(
request=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
@@ -1361,21 +2247,34 @@ def perform_create(self, serializer):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=True, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=True, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request, pk=None):
product_type = self.get_object()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, product_type, options)
report = serializers.ReportGenerateSerializer(data)
@@ -1384,135 +2283,167 @@ def generate_report(self, request, pk=None):
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class ProductTypeMemberViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ProductTypeMemberViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductTypeMemberSerializer
queryset = Product_Type_Member.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product_type_id', 'user_id')
- swagger_schema = prefetch.get_prefetch_schema(["product_type_members_list", "product_type_members_read"],
- serializers.ProductTypeMemberSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductTypeMemberPermission)
+ filterset_fields = ["id", "product_type_id", "user_id"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["product_type_members_list", "product_type_members_read"],
+ serializers.ProductTypeMemberSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductTypeMemberPermission,
+ )
def get_queryset(self):
- return get_authorized_product_type_members(Permissions.Product_Type_View).distinct()
+ return get_authorized_product_type_members(
+ Permissions.Product_Type_View
+ ).distinct()
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.role.is_owner:
- owners = Product_Type_Member.objects.filter(product_type=instance.product_type, role__is_owner=True).count()
+ owners = Product_Type_Member.objects.filter(
+ product_type=instance.product_type, role__is_owner=True
+ ).count()
if owners <= 1:
- return Response('There must be at least one owner', status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "There must be at least one owner",
+ status=status.HTTP_400_BAD_REQUEST,
+ )
self.perform_destroy(instance)
return Response(status=status.HTTP_204_NO_CONTENT)
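This destroy override enforces a simple invariant: a product type must always keep at least one owner, so removing the final owner membership fails with HTTP 400 before perform_destroy runs. The same rule as a standalone predicate (sketch; function name is illustrative):

    from dojo.models import Product_Type_Member

    def can_remove_member(instance) -> bool:
        # Non-owners can always be removed; the last remaining owner cannot.
        if not instance.role.is_owner:
            return True
        owners = Product_Type_Member.objects.filter(
            product_type=instance.product_type, role__is_owner=True
        ).count()
        return owners > 1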
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ exclude=True
)
@swagger_auto_schema(
- request_body=no_body,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ auto_schema=None
)
def partial_update(self, request, pk=None):
# Object authorization won't work if not all data is provided
- response = {'message': 'Patch function is not offered in this path.'}
+ response = {"message": "Patch function is not offered in this path."}
return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED)
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class ProductTypeGroupViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ProductTypeGroupViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ProductTypeGroupSerializer
queryset = Product_Type_Group.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'product_type_id', 'group_id')
- swagger_schema = prefetch.get_prefetch_schema(["product_type_groups_list", "product_type_groups_read"],
- serializers.ProductTypeGroupSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasProductTypeGroupPermission)
+ filterset_fields = ["id", "product_type_id", "group_id"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["product_type_groups_list", "product_type_groups_read"],
+ serializers.ProductTypeGroupSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasProductTypeGroupPermission,
+ )
def get_queryset(self):
- return get_authorized_product_type_groups(Permissions.Product_Type_Group_View).distinct()
+ return get_authorized_product_type_groups(
+ Permissions.Product_Type_Group_View
+ ).distinct()
@extend_schema(
- request=OpenApiTypes.NONE,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ exclude=True
)
@swagger_auto_schema(
- request_body=no_body,
- responses={status.HTTP_405_METHOD_NOT_ALLOWED: ""},
+ auto_schema=None
)
def partial_update(self, request, pk=None):
# Object authorization won't work if not all data is provided
- response = {'message': 'Patch function is not offered in this path.'}
+ response = {"message": "Patch function is not offered in this path."}
return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED)
# Authorization: object-based
-class StubFindingsViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class StubFindingsViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.StubFindingSerializer
queryset = Stub_Finding.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'title', 'date', 'severity', 'description')
- permission_classes = (IsAuthenticated, permissions.UserHasFindingPermission)
+ filterset_fields = ["id", "title", "date", "severity", "description"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["stub_findings_list", "stub_findings_read"],
+ serializers.StubFindingSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasFindingPermission,
+ )
def get_queryset(self):
- return get_authorized_stub_findings(Permissions.Finding_View).distinct()
+ return get_authorized_stub_findings(
+ Permissions.Finding_View
+ ).distinct()
def get_serializer_class(self):
- if self.request and self.request.method == 'POST':
+ if self.request and self.request.method == "POST":
return serializers.StubFindingCreateSerializer
else:
return serializers.StubFindingSerializer
-# Authorization: configuration
-class DevelopmentEnvironmentViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: authenticated, configuration
+class DevelopmentEnvironmentViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.DevelopmentEnvironmentSerializer
queryset = Development_Environment.objects.all()
filter_backends = (DjangoFilterBackend,)
@@ -1520,17 +2451,17 @@ class DevelopmentEnvironmentViewSet(mixins.ListModelMixin,
# Authorization: object-based
-class TestsViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- ra_api.AcceptedRisksMixin,
- viewsets.GenericViewSet):
+class TestsViewSet(
+ PrefetchDojoModelViewSet,
+ ra_api.AcceptedRisksMixin,
+):
serializer_class = serializers.TestSerializer
queryset = Test.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_class = ApiTestFilter
+ filterset_class = ApiTestFilter
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["tests_list", "tests_read"], serializers.TestSerializer
+ ).to_schema()
permission_classes = (IsAuthenticated, permissions.UserHasTestPermission)
@property
@@ -1538,13 +2469,24 @@ def risk_application_model_class(self):
return Test
def get_queryset(self):
- return get_authorized_tests(Permissions.Test_View).prefetch_related(
- 'notes',
- 'files').distinct()
+ return (
+ get_authorized_tests(Permissions.Test_View)
+ .prefetch_related("notes", "files")
+ .distinct()
+ )
+
+ def destroy(self, request, *args, **kwargs):
+ instance = self.get_object()
+ if get_setting("ASYNC_OBJECT_DELETE"):
+ async_del = async_delete()
+ async_del.delete(instance)
+ else:
+ instance.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
def get_serializer_class(self):
- if self.request and self.request.method == 'POST':
- if self.action == 'accept_risks':
+ if self.request and self.request.method == "POST":
+ if self.action == "accept_risks":
return ra_api.AcceptedRiskSerializer
return serializers.TestCreateSerializer
else:
@@ -1558,339 +2500,469 @@ def get_serializer_class(self):
request_body=serializers.ReportGenerateOptionSerializer,
responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer},
)
- @action(detail=True, methods=['post'], permission_classes=[IsAuthenticated])
+ @action(
+ detail=True, methods=["post"], permission_classes=[IsAuthenticated]
+ )
def generate_report(self, request, pk=None):
test = self.get_object()
options = {}
# prepare post data
- report_options = serializers.ReportGenerateOptionSerializer(data=request.data)
+ report_options = serializers.ReportGenerateOptionSerializer(
+ data=request.data
+ )
if report_options.is_valid():
- options['include_finding_notes'] = report_options.validated_data['include_finding_notes']
- options['include_finding_images'] = report_options.validated_data['include_finding_images']
- options['include_executive_summary'] = report_options.validated_data['include_executive_summary']
- options['include_table_of_contents'] = report_options.validated_data['include_table_of_contents']
+ options["include_finding_notes"] = report_options.validated_data[
+ "include_finding_notes"
+ ]
+ options["include_finding_images"] = report_options.validated_data[
+ "include_finding_images"
+ ]
+ options[
+ "include_executive_summary"
+ ] = report_options.validated_data["include_executive_summary"]
+ options[
+ "include_table_of_contents"
+ ] = report_options.validated_data["include_table_of_contents"]
else:
- return Response(report_options.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ report_options.errors, status=status.HTTP_400_BAD_REQUEST
+ )
data = report_generate(request, test, options)
report = serializers.ReportGenerateSerializer(data)
return Response(report.data)
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.TestToNotesSerializer}
+ methods=["GET"],
+ responses={status.HTTP_200_OK: serializers.TestToNotesSerializer},
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.TestToNotesSerializer}
+ method="get",
+ responses={status.HTTP_200_OK: serializers.TestToNotesSerializer},
)
@swagger_auto_schema(
- methods=['post'],
+ methods=["post"],
request_body=serializers.AddNewNoteOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.NoteSerializer}
+ responses={status.HTTP_201_CREATED: serializers.NoteSerializer},
)
@action(detail=True, methods=["get", "post"])
def notes(self, request, pk=None):
test = self.get_object()
- if request.method == 'POST':
- new_note = serializers.AddNewNoteOptionSerializer(data=request.data)
+ if request.method == "POST":
+ new_note = serializers.AddNewNoteOptionSerializer(
+ data=request.data
+ )
if new_note.is_valid():
- entry = new_note.validated_data['entry']
- private = new_note.validated_data.get('private', False)
- note_type = new_note.validated_data.get('note_type', None)
+ entry = new_note.validated_data["entry"]
+ private = new_note.validated_data.get("private", False)
+ note_type = new_note.validated_data.get("note_type", None)
else:
- return Response(new_note.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_note.errors, status=status.HTTP_400_BAD_REQUEST
+ )
author = request.user
- note = Notes(entry=entry, author=author, private=private, note_type=note_type)
+ note = Notes(
+ entry=entry,
+ author=author,
+ private=private,
+ note_type=note_type,
+ )
note.save()
test.notes.add(note)
- serialized_note = serializers.NoteSerializer({
- "author": author, "entry": entry,
- "private": private
- })
- result = serializers.TestToNotesSerializer({
- "test_id": test, "notes": [serialized_note.data]
- })
- return Response(serialized_note.data,
- status=status.HTTP_201_CREATED)
+ serialized_note = serializers.NoteSerializer(
+ {"author": author, "entry": entry, "private": private}
+ )
+ return Response(
+ serialized_note.data, status=status.HTTP_201_CREATED
+ )
notes = test.notes.all()
- serialized_notes = serializers.TestToNotesSerializer({
- "test_id": test, "notes": notes
- })
- return Response(serialized_notes.data,
- status=status.HTTP_200_OK)
+ serialized_notes = serializers.TestToNotesSerializer(
+ {"test_id": test, "notes": notes}
+ )
+ return Response(serialized_notes.data, status=status.HTTP_200_OK)
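The combined GET/POST notes action keeps note handling on the test resource itself: GET returns the test id plus all of its serialized notes, while POST validates an AddNewNoteOptionSerializer payload, saves the note with the requesting user as author, and attaches it to the test. An illustrative client interaction (host and token are placeholders):

    import requests

    base = "https://defectdojo.example.com/api/v2/tests/5/notes/"
    auth = {"Authorization": "Token REDACTED"}

    # Attach a new public note to test 5 ...
    created = requests.post(
        base, headers=auth, json={"entry": "Retested; still reproducible."}
    )
    assert created.status_code == 201

    # ... then read every note on the test back.
    notes = requests.get(base, headers=auth).json()["notes"]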
@extend_schema(
- methods=['GET'],
- responses={status.HTTP_200_OK: serializers.TestToFilesSerializer}
+ methods=["GET"],
+ responses={status.HTTP_200_OK: serializers.TestToFilesSerializer},
)
@extend_schema(
- methods=['POST'],
+ methods=["POST"],
request=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
)
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.TestToFilesSerializer}
+ method="get",
+ responses={status.HTTP_200_OK: serializers.TestToFilesSerializer},
)
@swagger_auto_schema(
- method='post',
+ method="post",
request_body=serializers.AddNewFileOptionSerializer,
- responses={status.HTTP_201_CREATED: serializers.FileSerializer}
+ responses={status.HTTP_201_CREATED: serializers.FileSerializer},
+ )
+ @action(
+ detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,)
)
- @action(detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,))
def files(self, request, pk=None):
test = self.get_object()
- if request.method == 'POST':
+ if request.method == "POST":
new_file = serializers.FileSerializer(data=request.data)
if new_file.is_valid():
- title = new_file.validated_data['title']
- file = new_file.validated_data['file']
+ title = new_file.validated_data["title"]
+ file = new_file.validated_data["file"]
else:
- return Response(new_file.errors, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ new_file.errors, status=status.HTTP_400_BAD_REQUEST
+ )
file = FileUpload(title=title, file=file)
file.save()
test.files.add(file)
serialized_file = serializers.FileSerializer(file)
- return Response(serialized_file.data, status=status.HTTP_201_CREATED)
+ return Response(
+ serialized_file.data, status=status.HTTP_201_CREATED
+ )
files = test.files.all()
- serialized_files = serializers.TestToFilesSerializer({
- "test_id": test, "files": files
- })
+ serialized_files = serializers.TestToFilesSerializer(
+ {"test_id": test, "files": files}
+ )
return Response(serialized_files.data, status=status.HTTP_200_OK)
-
-# Authorization: configuration
-class TestTypesViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+ @extend_schema(
+ methods=["GET"],
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @swagger_auto_schema(
+ method="get",
+ responses={
+ status.HTTP_200_OK: serializers.RawFileSerializer,
+ },
+ )
+ @action(
+ detail=True,
+ methods=["get"],
+ url_path=r"files/download/(?P<file_id>\d+)",
+ )
+ def download_file(self, request, file_id, pk=None):
+ test = self.get_object()
+ # Get the file object
+ file_object_qs = test.files.filter(id=file_id)
+ file_object = (
+ file_object_qs.first() if len(file_object_qs) > 0 else None
+ )
+ if file_object is None:
+ return Response(
+ {"error": "File ID not associated with Test"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ # Get the path of the file in media root
+ file_path = f"{settings.MEDIA_ROOT}/{file_object.file.url.lstrip(settings.MEDIA_URL)}"
+ file_handle = open(file_path, "rb")
+ # send file
+ response = FileResponse(
+ file_handle,
+ content_type=f"{mimetypes.guess_type(file_path)}",
+ status=status.HTTP_200_OK,
+ )
+ response["Content-Length"] = file_object.file.size
+ response[
+ "Content-Disposition"
+ ] = f'attachment; filename="{file_object.file.name}"'
+
+ return response
+
+
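download_file resolves the uploaded file by id, rejects ids that are not attached to the test with a 404, and streams the content back with an attachment Content-Disposition. Two Python subtleties in the path and content-type handling above are worth flagging: str.lstrip removes a set of characters rather than a literal prefix, and mimetypes.guess_type returns a (type, encoding) tuple, so interpolating it into an f-string produces text like "('text/plain', None)". A prefix-safe strip looks like this (helper name is illustrative; on Python 3.9+ str.removeprefix does the same):

    def strip_prefix(path: str, prefix: str) -> str:
        # Unlike path.lstrip(prefix), this removes the literal prefix once
        # instead of stripping any leading characters that occur in prefix.
        return path[len(prefix):] if path.startswith(prefix) else path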
+# Authorization: authenticated, configuration
+class TestTypesViewSet(
+ mixins.UpdateModelMixin,
+ mixins.CreateModelMixin,
+ viewsets.ReadOnlyModelViewSet,
+):
serializer_class = serializers.TestTypeSerializer
queryset = Test_Type.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('name',)
+ filterset_fields = [
+ "name",
+ ]
permission_classes = (IsAuthenticated, DjangoModelPermissions)
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class TestImportViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class TestImportViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.TestImportSerializer
queryset = Test_Import.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('test', 'findings_affected', 'version', 'branch_tag', 'build_id', 'commit_hash', 'test_import_finding_action__action',
- 'test_import_finding_action__finding', 'test_import_finding_action__created')
- swagger_schema = prefetch.get_prefetch_schema(["test_imports_list", "test_imports_read"], serializers.TestImportSerializer). \
- to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasTestImportPermission)
+ filterset_fields = [
+ "test",
+ "findings_affected",
+ "version",
+ "branch_tag",
+ "build_id",
+ "commit_hash",
+ "test_import_finding_action__action",
+ "test_import_finding_action__finding",
+ "test_import_finding_action__created",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["test_imports_list", "test_imports_read"],
+ serializers.TestImportSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasTestImportPermission,
+ )
def get_queryset(self):
- return get_authorized_test_imports(Permissions.Test_View).prefetch_related(
- 'test_import_finding_action_set',
- 'findings_affected',
- 'findings_affected__endpoints',
- 'findings_affected__endpoint_status',
- 'findings_affected__finding_meta',
- 'findings_affected__jira_issue',
- 'findings_affected__burprawrequestresponse_set',
- 'findings_affected__jira_issue',
- 'findings_affected__jira_issue',
- 'findings_affected__jira_issue',
- 'findings_affected__reviewers',
- 'findings_affected__notes',
- 'findings_affected__notes__author',
- 'findings_affected__notes__history',
- 'findings_affected__files',
- 'findings_affected__found_by',
- 'findings_affected__tags',
- 'findings_affected__risk_acceptance_set',
- 'test',
- 'test__tags',
- 'test__notes',
- 'test__notes__author',
- 'test__files',
- 'test__test_type',
- 'test__engagement',
- 'test__environment',
- 'test__engagement__product',
- 'test__engagement__product__prod_type')
-
-
-# Authorization: superuser
-class ToolConfigurationsViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+ return get_authorized_test_imports(
+ Permissions.Test_View
+ ).prefetch_related(
+ "test_import_finding_action_set",
+ "findings_affected",
+ "findings_affected__endpoints",
+ "findings_affected__status_finding",
+ "findings_affected__finding_meta",
+ "findings_affected__jira_issue",
+ "findings_affected__burprawrequestresponse_set",
+ "findings_affected__jira_issue",
+ "findings_affected__jira_issue",
+ "findings_affected__jira_issue",
+ "findings_affected__reviewers",
+ "findings_affected__notes",
+ "findings_affected__notes__author",
+ "findings_affected__notes__history",
+ "findings_affected__files",
+ "findings_affected__found_by",
+ "findings_affected__tags",
+ "findings_affected__risk_acceptance_set",
+ "test",
+ "test__tags",
+ "test__notes",
+ "test__notes__author",
+ "test__files",
+ "test__test_type",
+ "test__engagement",
+ "test__environment",
+ "test__engagement__product",
+ "test__engagement__product__prod_type",
+ )
+
+
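The long prefetch_related chain above exists so that serializing a test import with all of its affected findings does not degenerate into one query per related row: each listed lookup is resolved in a single extra query and joined in memory. The trade-off in miniature (model import per DefectDojo's layout):

    from dojo.models import Test_Import

    # N+1 shape: one query for the imports, then extra queries per row.
    for ti in Test_Import.objects.all():
        for finding in ti.findings_affected.all():  # query per import
            list(finding.tags.all())                # query per finding

    # Batched shape: three queries total, regardless of row counts.
    imports = Test_Import.objects.prefetch_related(
        "findings_affected", "findings_affected__tags"
    )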
+# Authorization: configurations
+class ToolConfigurationsViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ToolConfigurationSerializer
queryset = Tool_Configuration.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'tool_type', 'url', 'authentication_type')
- permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
+ filterset_fields = [
+ "id",
+ "name",
+ "tool_type",
+ "url",
+ "authentication_type",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["tool_configurations_list", "tool_configurations_read"],
+ serializers.ToolConfigurationSerializer,
+ ).to_schema()
+ permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,)
# Authorization: object-based
-class ToolProductSettingsViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ToolProductSettingsViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.ToolProductSettingsSerializer
queryset = Tool_Product_Settings.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'product', 'tool_configuration',
- 'tool_project_id', 'url')
- permission_classes = (IsAuthenticated, permissions.UserHasToolProductSettingsPermission)
+ filterset_fields = [
+ "id",
+ "name",
+ "product",
+ "tool_configuration",
+ "tool_project_id",
+ "url",
+ ]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["tool_configurations_list", "tool_configurations_read"],
+ serializers.ToolConfigurationSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasToolProductSettingsPermission,
+ )
def get_queryset(self):
return get_authorized_tool_product_settings(Permissions.Product_View)
# Authorization: configuration
-class ToolTypesViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class ToolTypesViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.ToolTypeSerializer
queryset = Tool_Type.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'description')
- permission_classes = (permissions.UserHasConfigurationPermissionStaff, )
+ filterset_fields = ["id", "name", "description"]
+ permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,)
-# Authorization: authenticated users
-class RegulationsViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: authenticated, configuration
+class RegulationsViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.RegulationSerializer
queryset = Regulation.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'description')
+ filterset_fields = ["id", "name", "description"]
permission_classes = (IsAuthenticated, DjangoModelPermissions)
# Authorization: configuration
-class UsersViewSet(mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class UsersViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.UserSerializer
queryset = User.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'username', 'first_name', 'last_name', 'email')
- permission_classes = (permissions.UserHasConfigurationPermissionStaff, )
+ filterset_fields = [
+ "id",
+ "username",
+ "first_name",
+ "last_name",
+ "email",
+ "is_active",
+ "is_superuser",
+ ]
+ permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,)
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if request.user == instance:
- return Response('Users may not delete themselves', status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ "Users may not delete themselves",
+ status=status.HTTP_400_BAD_REQUEST,
+ )
self.perform_destroy(instance)
return Response(status=status.HTTP_204_NO_CONTENT)
# Authorization: superuser
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class UserContactInfoViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- viewsets.GenericViewSet):
+class UserContactInfoViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.UserContactInfoSerializer
queryset = UserContactInfo.objects.all()
- swagger_schema = prefetch.get_prefetch_schema(["user_contact_infos_list", "user_contact_infos_read"],
- serializers.UserContactInfoSerializer).to_schema()
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["user_contact_infos_list", "user_contact_infos_read"],
+ serializers.UserContactInfoSerializer,
+ ).to_schema()
filter_backends = (DjangoFilterBackend,)
- filter_fields = '__all__'
+ filterset_fields = "__all__"
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
# Authorization: authenticated users
class UserProfileView(GenericAPIView):
- permission_classes = (IsAuthenticated, )
+ permission_classes = (IsAuthenticated,)
pagination_class = None
serializer_class = serializers.UserProfileSerializer
@swagger_auto_schema(
- method='get',
- responses={status.HTTP_200_OK: serializers.UserProfileSerializer}
+ method="get",
+ responses={status.HTTP_200_OK: serializers.UserProfileSerializer},
+ )
+ @action(
+ detail=True, methods=["get"], filter_backends=[], pagination_class=None
)
- @action(detail=True, methods=["get"],
- filter_backends=[], pagination_class=None)
def get(self, request, format=None):
user = get_current_user()
- user_contact_info = user.usercontactinfo if hasattr(user, 'usercontactinfo') else None
- global_role = user.global_role if hasattr(user, 'global_role') else None
+ user_contact_info = (
+ user.usercontactinfo if hasattr(user, "usercontactinfo") else None
+ )
+ global_role = (
+ user.global_role if hasattr(user, "global_role") else None
+ )
dojo_group_member = Dojo_Group_Member.objects.filter(user=user)
product_type_member = Product_Type_Member.objects.filter(user=user)
product_member = Product_Member.objects.filter(user=user)
serializer = serializers.UserProfileSerializer(
- {"user": user,
- "user_contact_info": user_contact_info,
- "global_role": global_role,
- "dojo_group_member": dojo_group_member,
- "product_type_member": product_type_member,
- "product_member": product_member}, many=False)
+ {
+ "user": user,
+ "user_contact_info": user_contact_info,
+ "global_role": global_role,
+ "dojo_group_member": dojo_group_member,
+ "product_type_member": product_type_member,
+ "product_member": product_member,
+ },
+ many=False,
+ )
return Response(serializer.data)
# Authorization: authenticated users, DjangoModelPermissions
-class ImportScanView(mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class ImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet):
"""
Imports a scan report into an engagement or product.
@@ -1912,26 +2984,52 @@ class ImportScanView(mixins.CreateModelMixin,
When using names you can let the importer automatically create Engagements, Products and Product_Types
by using `auto_create_context=True`.
+
+ When `auto_create_context` is set to `True` you can use `deduplication_on_engagement` to restrict deduplication for
+ imported Findings to the newly created Engagement.
"""
+
serializer_class = serializers.ImportScanSerializer
parser_classes = [MultiPartParser]
queryset = Test.objects.none()
permission_classes = (IsAuthenticated, permissions.UserHasImportPermission)
def perform_create(self, serializer):
- _, _, _, engagement_id, engagement_name, product_name, product_type_name, auto_create_context = serializers.get_import_meta_data_from_dict(serializer.validated_data)
+ (
+ _,
+ _,
+ _,
+ engagement_id,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = serializers.get_import_meta_data_from_dict(
+ serializer.validated_data
+ )
product = get_target_product_if_exists(product_name)
- engagement = get_target_engagement_if_exists(engagement_id, engagement_name, product)
-
- # when using auto_create_context, the engagement or product may not have been created yet
- jira_driver = engagement if engagement else product if product else None
- jira_project = jira_helper.get_jira_project(jira_driver) if jira_driver else None
-
- push_to_jira = serializer.validated_data.get('push_to_jira')
- if get_system_setting('enable_jira') and jira_project:
+ engagement = get_target_engagement_if_exists(
+ engagement_id, engagement_name, product
+ )
+
+ # when using auto_create_context, the engagement or product may not
+ # have been created yet
+ jira_driver = (
+ engagement if engagement else product if product else None
+ )
+ jira_project = (
+ jira_helper.get_jira_project(jira_driver) if jira_driver else None
+ )
+
+ push_to_jira = serializer.validated_data.get("push_to_jira")
+ if get_system_setting("enable_jira") and jira_project:
push_to_jira = push_to_jira or jira_project.push_all_issues
- logger.debug('push_to_jira: %s', serializer.validated_data.get('push_to_jira'))
+ logger.debug(
+ "push_to_jira: %s", serializer.validated_data.get("push_to_jira")
+ )
serializer.save(push_to_jira=push_to_jira)
def get_queryset(self):
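The JIRA wiring in perform_create has one subtlety: with auto_create_context the engagement or product may not exist yet, so the code picks whichever anchor it can find (engagement, else product) to look up the JIRA project, then widens the caller's push_to_jira flag when that project pushes all issues. The same decision as a pure function (sketch); ReImportScanView below repeats the pattern with the test as the first candidate anchor:

    def effective_push_to_jira(
        requested: bool, jira_enabled: bool, jira_project
    ) -> bool:
        # A project-level "push all issues" policy overrides a caller's
        # False, but only when JIRA is enabled and a project mapping exists.
        if jira_enabled and jira_project:
            return requested or bool(jira_project.push_all_issues)
        return requested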
@@ -1939,10 +3037,11 @@ def get_queryset(self):
# Authorization: authenticated users, DjangoModelPermissions
-class EndpointMetaImporterView(mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class EndpointMetaImporterView(
+ mixins.CreateModelMixin, viewsets.GenericViewSet
+):
"""
- Imports a CSV file into a product to propogate arbitrary meta and tags on endpoints.
+ Imports a CSV file into a product to propagate arbitrary meta and tags on endpoints.
By Names:
- Provide `product_name` of existing product
@@ -1952,10 +3051,14 @@ class EndpointMetaImporterView(mixins.CreateModelMixin,
In this scenario Defect Dojo will look up the product by the provided details.
"""
+
serializer_class = serializers.EndpointMetaImporterSerializer
parser_classes = [MultiPartParser]
queryset = Product.objects.all()
- permission_classes = (IsAuthenticated, permissions.UserHasMetaImportPermission)
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasMetaImportPermission,
+ )
def perform_create(self, serializer):
serializer.save()
@@ -1964,68 +3067,77 @@ def get_queryset(self):
return get_authorized_products(Permissions.Endpoint_Edit)
-# Authorization: staff users
-class LanguageTypeViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.CreateModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: configuration
+class LanguageTypeViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.LanguageTypeSerializer
queryset = Language_Type.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'language', 'color')
- permission_classes = (IsAdminUser, DjangoModelPermissions)
+ filterset_fields = ["id", "language", "color"]
+ permission_classes = (permissions.UserHasConfigurationPermissionStaff,)
# Authorization: object-based
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class LanguageViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.UpdateModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class LanguageViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.LanguageSerializer
queryset = Languages.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'language', 'product')
- swagger_schema = prefetch.get_prefetch_schema(["languages_list", "languages_read"],
- serializers.LanguageSerializer).to_schema()
- permission_classes = (IsAuthenticated, permissions.UserHasLanguagePermission)
+ filterset_fields = ["id", "language", "product"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["languages_list", "languages_read"], serializers.LanguageSerializer
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasLanguagePermission,
+ )
def get_queryset(self):
return get_authorized_languages(Permissions.Language_View).distinct()
# Authorization: object-based
-class ImportLanguagesView(mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class ImportLanguagesView(mixins.CreateModelMixin, viewsets.GenericViewSet):
serializer_class = serializers.ImportLanguagesSerializer
parser_classes = [MultiPartParser]
queryset = Product.objects.none()
- permission_classes = (IsAuthenticated, permissions.UserHasLanguagePermission)
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasLanguagePermission,
+ )
def get_queryset(self):
return get_authorized_products(Permissions.Language_Add)
-# Authorization: authenticated users, DjangoModelPermissions
-class ReImportScanView(mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: object-based
+class ReImportScanView(mixins.CreateModelMixin, viewsets.GenericViewSet):
"""
Reimports a scan report into an existing test.
@@ -2048,58 +3160,106 @@ class ReImportScanView(mixins.CreateModelMixin,
When using names you can let the importer automatically create Engagements, Products and Product_Types
by using `auto_create_context=True`.
+
+ When `auto_create_context` is set to `True` you can use `deduplication_on_engagement` to restrict deduplication for
+ imported Findings to the newly created Engagement.
"""
+
serializer_class = serializers.ReImportScanSerializer
parser_classes = [MultiPartParser]
queryset = Test.objects.none()
- permission_classes = (IsAuthenticated, permissions.UserHasReimportPermission)
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasReimportPermission,
+ )
def get_queryset(self):
return get_authorized_tests(Permissions.Import_Scan_Result)
def perform_create(self, serializer):
- test_id, test_title, scan_type, _, engagement_name, product_name, product_type_name, auto_create_context = serializers.get_import_meta_data_from_dict(serializer.validated_data)
+ (
+ test_id,
+ test_title,
+ scan_type,
+ _,
+ engagement_name,
+ product_name,
+ product_type_name,
+ auto_create_context,
+ deduplication_on_engagement,
+ do_not_reactivate,
+ ) = serializers.get_import_meta_data_from_dict(
+ serializer.validated_data
+ )
product = get_target_product_if_exists(product_name)
- engagement = get_target_engagement_if_exists(None, engagement_name, product)
- test = get_target_test_if_exists(test_id, test_title, scan_type, engagement)
-
- # when using auto_create_context, the engagement or product may not have been created yet
- jira_driver = test if test else engagement if engagement else product if product else None
- jira_project = jira_helper.get_jira_project(jira_driver) if jira_driver else None
-
- push_to_jira = serializer.validated_data.get('push_to_jira')
- if get_system_setting('enable_jira') and jira_project:
+ engagement = get_target_engagement_if_exists(
+ None, engagement_name, product
+ )
+ test = get_target_test_if_exists(
+ test_id, test_title, scan_type, engagement
+ )
+
+ # when using auto_create_context, the engagement or product may not
+ # have been created yet
+ jira_driver = (
+ test
+ if test
+ else engagement
+ if engagement
+ else product
+ if product
+ else None
+ )
+ jira_project = (
+ jira_helper.get_jira_project(jira_driver) if jira_driver else None
+ )
+
+ push_to_jira = serializer.validated_data.get("push_to_jira")
+ if get_system_setting("enable_jira") and jira_project:
push_to_jira = push_to_jira or jira_project.push_all_issues
- logger.debug('push_to_jira: %s', serializer.validated_data.get('push_to_jira'))
+ logger.debug(
+ "push_to_jira: %s", serializer.validated_data.get("push_to_jira")
+ )
serializer.save(push_to_jira=push_to_jira)
-# Authorization: staff
-class NoteTypeViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+# Authorization: configuration
+class NoteTypeViewSet(
+ DojoModelViewSet,
+):
serializer_class = serializers.NoteTypeSerializer
queryset = Note_Type.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'name', 'description', 'is_single', 'is_active', 'is_mandatory')
- permission_classes = (IsAdminUser, DjangoModelPermissions)
+ filterset_fields = [
+ "id",
+ "name",
+ "description",
+ "is_single",
+ "is_active",
+ "is_mandatory",
+ ]
+ permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,)
# Authorization: superuser
-class NotesViewSet(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class NotesViewSet(
+ mixins.UpdateModelMixin,
+ viewsets.ReadOnlyModelViewSet,
+):
serializer_class = serializers.NoteSerializer
queryset = Notes.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'entry', 'author',
- 'private', 'date', 'edited',
- 'edit_time', 'editor')
+ filterset_fields = [
+ "id",
+ "entry",
+ "author",
+ "private",
+ "date",
+ "edited",
+ "edit_time",
+ "editor",
+ ]
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
@@ -2111,15 +3271,6 @@ def report_generate(request, obj, options):
test = None
endpoint = None
endpoints = None
- endpoint_all_findings = None
- endpoint_monthly_counts = None
- endpoint_active_findings = None
- accepted_findings = None
- open_findings = None
- closed_findings = None
- verified_findings = None
- report_title = None
- report_subtitle = None
include_finding_notes = False
include_finding_images = False
@@ -2127,34 +3278,37 @@ def report_generate(request, obj, options):
include_table_of_contents = False
report_info = "Generated By %s on %s" % (
- user.get_full_name(), (timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")))
+ user.get_full_name(),
+ (timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")),
+ )
# generate = "_generate" in request.GET
report_name = str(obj)
- report_type = type(obj).__name__
- include_finding_notes = options.get('include_finding_notes', False)
- include_finding_images = options.get('include_finding_images', False)
- include_executive_summary = options.get('include_executive_summary', False)
- include_table_of_contents = options.get('include_table_of_contents', False)
+ include_finding_notes = options.get("include_finding_notes", False)
+ include_finding_images = options.get("include_finding_images", False)
+ include_executive_summary = options.get("include_executive_summary", False)
+ include_table_of_contents = options.get("include_table_of_contents", False)
if type(obj).__name__ == "Product_Type":
product_type = obj
report_name = "Product Type Report: " + str(product_type)
- report_title = "Product Type Report"
- report_subtitle = str(product_type)
-
- findings = ReportFindingFilter(request.GET, prod_type=product_type, queryset=prefetch_related_findings_for_report(Finding.objects.filter(
- test__engagement__product__prod_type=product_type)))
- products = Product.objects.filter(prod_type=product_type,
- engagement__test__finding__in=findings.qs).distinct()
- engagements = Engagement.objects.filter(product__prod_type=product_type,
- test__finding__in=findings.qs).distinct()
- tests = Test.objects.filter(engagement__product__prod_type=product_type,
- finding__in=findings.qs).distinct()
+
+ findings = ReportFindingFilter(
+ request.GET,
+ prod_type=product_type,
+ queryset=prefetch_related_findings_for_report(
+ Finding.objects.filter(
+ test__engagement__product__prod_type=product_type
+ )
+ ),
+ )
+
if len(findings.qs) > 0:
- start_date = timezone.make_aware(datetime.combine(findings.qs.last().date, datetime.min.time()))
+ start_date = timezone.make_aware(
+ datetime.combine(findings.qs.last().date, datetime.min.time())
+ )
else:
start_date = timezone.now()
@@ -2165,116 +3319,110 @@ def report_generate(request, obj, options):
# include current month
months_between += 1
- endpoint_monthly_counts = get_period_counts_legacy(findings.qs.order_by('numerical_severity'), findings.qs.order_by('numerical_severity'), None,
- months_between, start_date,
- relative_delta='months')
-
elif type(obj).__name__ == "Product":
product = obj
report_name = "Product Report: " + str(product)
- report_title = "Product Report"
- report_subtitle = str(product)
- findings = ReportFindingFilter(request.GET, product=product, queryset=prefetch_related_findings_for_report(Finding.objects.filter(
- test__engagement__product=product)))
- ids = set(finding.id for finding in findings.qs)
- engagements = Engagement.objects.filter(test__finding__id__in=ids).distinct()
- tests = Test.objects.filter(finding__id__in=ids).distinct()
- ids = get_endpoint_ids(Endpoint.objects.filter(product=product).distinct())
+
+ findings = ReportFindingFilter(
+ request.GET,
+ product=product,
+ queryset=prefetch_related_findings_for_report(
+ Finding.objects.filter(test__engagement__product=product)
+ ),
+ )
+ ids = get_endpoint_ids(
+ Endpoint.objects.filter(product=product).distinct()
+ )
endpoints = Endpoint.objects.filter(id__in=ids)
elif type(obj).__name__ == "Engagement":
engagement = obj
- findings = ReportFindingFilter(request.GET, engagement=engagement,
- queryset=prefetch_related_findings_for_report(Finding.objects.filter(test__engagement=engagement)))
+ findings = ReportFindingFilter(
+ request.GET,
+ engagement=engagement,
+ queryset=prefetch_related_findings_for_report(
+ Finding.objects.filter(test__engagement=engagement)
+ ),
+ )
report_name = "Engagement Report: " + str(engagement)
- report_title = "Engagement Report"
- report_subtitle = str(engagement)
-
ids = set(finding.id for finding in findings.qs)
- tests = Test.objects.filter(finding__id__in=ids).distinct()
- ids = get_endpoint_ids(Endpoint.objects.filter(product=engagement.product).distinct())
+ ids = get_endpoint_ids(
+ Endpoint.objects.filter(product=engagement.product).distinct()
+ )
endpoints = Endpoint.objects.filter(id__in=ids)
elif type(obj).__name__ == "Test":
test = obj
- findings = ReportFindingFilter(request.GET, engagement=test.engagement,
- queryset=prefetch_related_findings_for_report(Finding.objects.filter(test=test)))
- filename = "test_finding_report.pdf"
- template = "dojo/test_pdf_report.html"
+ findings = ReportFindingFilter(
+ request.GET,
+ engagement=test.engagement,
+ queryset=prefetch_related_findings_for_report(
+ Finding.objects.filter(test=test)
+ ),
+ )
report_name = "Test Report: " + str(test)
- report_title = "Test Report"
- report_subtitle = str(test)
elif type(obj).__name__ == "Endpoint":
endpoint = obj
host = endpoint.host
report_name = "Endpoint Report: " + host
- report_type = "Endpoint"
- endpoints = Endpoint.objects.filter(host=host,
- product=endpoint.product).distinct()
- report_title = "Endpoint Report"
- report_subtitle = host
- findings = ReportFindingFilter(request.GET,
- queryset=prefetch_related_findings_for_report(Finding.objects.filter(endpoints__in=endpoints)))
+ endpoints = Endpoint.objects.filter(
+ host=host, product=endpoint.product
+ ).distinct()
+ findings = ReportFindingFilter(
+ request.GET,
+ queryset=prefetch_related_findings_for_report(
+ Finding.objects.filter(endpoints__in=endpoints)
+ ),
+ )
elif type(obj).__name__ == "CastTaggedQuerySet":
- findings = ReportFindingFilter(request.GET,
- queryset=prefetch_related_findings_for_report(obj).distinct())
+ findings = ReportFindingFilter(
+ request.GET,
+ queryset=prefetch_related_findings_for_report(obj).distinct(),
+ )
- report_name = 'Finding'
- report_type = 'Finding'
- report_title = "Finding Report"
- report_subtitle = ''
+ report_name = "Finding"
else:
raise Http404()
result = {
- 'product_type': product_type,
- 'product': product,
- 'engagement': engagement,
- 'report_name': report_name,
- 'report_info': report_info,
- 'test': test,
- 'endpoint': endpoint,
- 'endpoints': endpoints,
- 'findings': findings.qs.order_by('numerical_severity'),
- 'include_table_of_contents': include_table_of_contents,
- 'user': user,
- 'team_name': settings.TEAM_NAME,
- 'title': 'Generate Report',
- 'user_id': request.user.id,
- 'host': report_url_resolver(request),
+ "product_type": product_type,
+ "product": product,
+ "engagement": engagement,
+ "report_name": report_name,
+ "report_info": report_info,
+ "test": test,
+ "endpoint": endpoint,
+ "endpoints": endpoints,
+ "findings": findings.qs.order_by("numerical_severity"),
+ "include_table_of_contents": include_table_of_contents,
+ "user": user,
+ "team_name": settings.TEAM_NAME,
+ "title": "Generate Report",
+ "user_id": request.user.id,
+ "host": report_url_resolver(request),
}
finding_notes = []
finding_files = []
if include_finding_images:
- for finding in findings.qs.order_by('numerical_severity'):
+ for finding in findings.qs.order_by("numerical_severity"):
files = finding.files.all()
if files:
- finding_files.append(
- {
- "finding_id": finding,
- "files": files
- }
- )
- result['finding_files'] = finding_files
+ finding_files.append({"finding_id": finding, "files": files})
+ result["finding_files"] = finding_files
if include_finding_notes:
- for finding in findings.qs.order_by('numerical_severity'):
+ for finding in findings.qs.order_by("numerical_severity"):
notes = finding.notes.filter(private=False)
if notes:
- finding_notes.append(
- {
- "finding_id": finding,
- "notes": notes
- }
- )
- result['finding_notes'] = finding_notes
+ finding_notes.append({"finding_id": finding, "notes": notes})
+ result["finding_notes"] = finding_notes
# Generating Executive summary based on obj type
if include_executive_summary and type(obj).__name__ != "Endpoint":
@@ -2306,7 +3454,7 @@ def report_generate(request, obj, options):
if eng.test_set.all():
for t in eng.test_set.all():
test_type_name = t.test_type.name
- if test.environment:
+ if t.environment:
test_environment_name = t.environment.name
test_target_start = t.target_start
if t.target_end:
@@ -2383,28 +3531,29 @@ def report_generate(request, obj, options):
pass # do nothing
executive_summary = {
- 'engagement_name': engagement_name,
- 'engagement_target_start': engagement_target_start,
- 'engagement_target_end': engagement_target_end,
- 'test_type_name': test_type_name,
- 'test_target_start': test_target_start,
- 'test_target_end': test_target_end,
- 'test_environment_name': test_environment_name,
- 'test_strategy_ref': test_strategy_ref,
- 'total_findings': total_findings
+ "engagement_name": engagement_name,
+ "engagement_target_start": engagement_target_start,
+ "engagement_target_end": engagement_target_end,
+ "test_type_name": test_type_name,
+ "test_target_start": test_target_start,
+ "test_target_end": test_target_end,
+ "test_environment_name": test_environment_name,
+ "test_strategy_ref": test_strategy_ref,
+ "total_findings": total_findings,
}
# End of executive summary generation
- result['executive_summary'] = executive_summary
+ result["executive_summary"] = executive_summary
return result
# Authorization: superuser
-class SystemSettingsViewSet(mixins.ListModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
- """ Basic control over System Settings. Use 'id' 1 for PUT, PATCH operations """
+class SystemSettingsViewSet(
+ mixins.ListModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet
+):
+ """Basic control over System Settings. Use 'id' 1 for PUT, PATCH operations"""
+
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
serializer_class = serializers.SystemSettingsSerializer
queryset = System_Settings.objects.all()
@@ -2412,59 +3561,191 @@ class SystemSettingsViewSet(mixins.ListModelMixin,
# Authorization: superuser
@extend_schema_view(
- list=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
+ list=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
+ ),
+ retrieve=extend_schema(
+ parameters=[
+ OpenApiParameter(
+ "prefetch",
+ OpenApiTypes.STR,
+ OpenApiParameter.QUERY,
+ required=False,
+ description="List of fields for which to prefetch model instances and add those to the response",
+ ),
+ ],
),
- retrieve=extend_schema(parameters=[
- OpenApiParameter("prefetch", OpenApiTypes.STR, OpenApiParameter.QUERY, required=False,
- description="List of fields for which to prefetch model instances and add those to the response"),
- ],
- )
)
-class NotificationsViewSet(prefetch.PrefetchListMixin,
- prefetch.PrefetchRetrieveMixin,
- mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- mixins.UpdateModelMixin,
- viewsets.GenericViewSet):
+class NotificationsViewSet(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.NotificationsSerializer
queryset = Notifications.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'user', 'product')
+ filterset_fields = ["id", "user", "product", "template"]
permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
- swagger_schema = prefetch.get_prefetch_schema(["notifications_list", "notifications_read"],
- serializers.NotificationsSerializer).to_schema()
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["notifications_list", "notifications_read"],
+ serializers.NotificationsSerializer,
+ ).to_schema()
-class EngagementPresetsViewset(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class EngagementPresetsViewset(
+ PrefetchDojoModelViewSet,
+):
serializer_class = serializers.EngagementPresetsSerializer
queryset = Engagement_Presets.objects.none()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'title', 'product')
-
- permission_classes = (IsAuthenticated, permissions.UserHasEngagementPresetPermission)
+ filterset_fields = ["id", "title", "product"]
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["engagement_presets_list", "engagement_presets_read"],
+ serializers.EngagementPresetsSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasEngagementPresetPermission,
+ )
def get_queryset(self):
return get_authorized_engagement_presets(Permissions.Product_View)
-class NetworkLocationsViewset(mixins.ListModelMixin,
- mixins.RetrieveModelMixin,
- mixins.UpdateModelMixin,
- mixins.DestroyModelMixin,
- mixins.CreateModelMixin,
- viewsets.GenericViewSet):
+class EngagementCheckListViewset(
+ PrefetchDojoModelViewSet,
+):
+ serializer_class = serializers.EngagementCheckListSerializer
+ queryset = Check_List.objects.none()
+ filter_backends = (DjangoFilterBackend,)
+ swagger_schema = prefetch.get_prefetch_schema(
+ ["engagement_checklists_list", "engagement_checklists_read"],
+ serializers.EngagementCheckListSerializer,
+ ).to_schema()
+ permission_classes = (
+ IsAuthenticated,
+ permissions.UserHasEngagementPermission,
+ )
+
+ def get_queryset(self):
+ return get_authorized_engagement_checklists(Permissions.Product_View)
+
+
+class NetworkLocationsViewset(
+ DojoModelViewSet,
+):
serializer_class = serializers.NetworkLocationsSerializer
queryset = Network_Locations.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('id', 'location')
+ filterset_fields = ["id", "location"]
permission_classes = (IsAuthenticated, DjangoModelPermissions)
+
+
+# Authorization: superuser
+class ConfigurationPermissionViewSet(
+ viewsets.ReadOnlyModelViewSet,
+):
+ serializer_class = serializers.ConfigurationPermissionSerializer
+ queryset = Permission.objects.filter(
+ codename__in=get_configuration_permissions_codenames()
+ )
+ filter_backends = (DjangoFilterBackend,)
+ filterset_fields = ["id", "name", "codename"]
+ permission_classes = (permissions.IsSuperUser, DjangoModelPermissions)
+
+
+class SLAConfigurationViewset(
+ DojoModelViewSet,
+):
+ serializer_class = serializers.SLAConfigurationSerializer
+ queryset = SLA_Configuration.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (IsAuthenticated, DjangoModelPermissions)
+
+
+class QuestionnaireQuestionViewSet(
+ viewsets.ReadOnlyModelViewSet,
+ dojo_mixins.QuestionSubClassFieldsMixin,
+):
+ serializer_class = serializers.QuestionnaireQuestionSerializer
+ queryset = Question.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (
+ permissions.UserHasEngagementPermission,
+ DjangoModelPermissions,
+ )
+
+
+class QuestionnaireAnswerViewSet(
+ viewsets.ReadOnlyModelViewSet,
+ dojo_mixins.AnswerSubClassFieldsMixin,
+):
+ serializer_class = serializers.QuestionnaireAnswerSerializer
+ queryset = Answer.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (
+ permissions.UserHasEngagementPermission,
+ DjangoModelPermissions,
+ )
+
+
+class QuestionnaireGeneralSurveyViewSet(
+ viewsets.ReadOnlyModelViewSet,
+):
+ serializer_class = serializers.QuestionnaireGeneralSurveySerializer
+ queryset = General_Survey.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (
+ permissions.UserHasEngagementPermission,
+ DjangoModelPermissions,
+ )
+
+
+class QuestionnaireEngagementSurveyViewSet(
+ viewsets.ReadOnlyModelViewSet
+):
+ serializer_class = serializers.QuestionnaireEngagementSurveySerializer
+ queryset = Engagement_Survey.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (
+ permissions.UserHasEngagementPermission,
+ DjangoModelPermissions,
+ )
+
+
+class QuestionnaireAnsweredSurveyViewSet(
+ prefetch.PrefetchListMixin,
+ prefetch.PrefetchRetrieveMixin,
+ viewsets.ReadOnlyModelViewSet,
+):
+ serializer_class = serializers.QuestionnaireAnsweredSurveySerializer
+ queryset = Answered_Survey.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ permission_classes = (
+ permissions.UserHasEngagementPermission,
+ DjangoModelPermissions,
+ )
+ swagger_schema = prefetch.get_prefetch_schema(
+ [
+ "questionnaire_answered_questionnaires_list",
+ "questionnaire_answered_questionnaires_read",
+ ],
+ serializers.QuestionnaireAnsweredSurveySerializer,
+ ).to_schema()
+
+
+# Authorization: configuration
+class AnnouncementViewSet(
+ DojoModelViewSet
+):
+ serializer_class = serializers.AnnouncementSerializer
+ queryset = Announcement.objects.all()
+ filter_backends = (DjangoFilterBackend,)
+ filterset_fields = "__all__"
+ permission_classes = (permissions.UserHasConfigurationPermissionStaff,)
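[Editor's note] A side note on the viewset consolidation above: the long per-class mixin lists are collapsed into shared base classes. The exact definitions of DojoModelViewSet / PrefetchDojoModelViewSet live elsewhere in DefectDojo; a minimal sketch of what such a base is assumed to bundle (the rest_framework mixin names are real, the class body is illustrative only):

    from rest_framework import mixins, viewsets

    class DojoModelViewSet(
        mixins.ListModelMixin,
        mixins.RetrieveModelMixin,
        mixins.CreateModelMixin,
        mixins.UpdateModelMixin,
        mixins.DestroyModelMixin,
        viewsets.GenericViewSet,
    ):
        """One shared base instead of repeating five mixins on every viewset."""

Note also that filter_fields (django-filter 1.x) is consistently renamed to filterset_fields, matching the django-filter 2.x API.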
diff --git a/dojo/apps.py b/dojo/apps.py
index ddb86866e68..6c84a420de8 100644
--- a/dojo/apps.py
+++ b/dojo/apps.py
@@ -30,7 +30,7 @@ def ready(self):
watson.register(self.get_model('Test'), fields=get_model_fields_with_extra(self.get_model('Test'), ('id', 'engagement__product__name', )), store=('engagement__product__name', )) # test_type__name?
watson.register(self.get_model('Finding'), fields=get_model_fields_with_extra(self.get_model('Finding'), ('id', 'url', 'unique_id_from_tool', 'test__engagement__product__name', 'jira_issue__jira_key', )),
- store=('cve', 'status', 'jira_issue__jira_key', 'test__engagement__product__name', 'severity', 'severity_display', 'latest_note'))
+ store=('status', 'jira_issue__jira_key', 'test__engagement__product__name', 'severity', 'severity_display', 'latest_note'))
# some thoughts on Finding fields that are not indexed yet:
# CWE can't be indexed as it is an integer
@@ -62,12 +62,20 @@ def ready(self):
watson.register(self.get_model('Endpoint'), store=('product__name', )) # add product name also?
watson.register(self.get_model('Engagement'), fields=get_model_fields_with_extra(self.get_model('Engagement'), ('id', 'product__name', )), store=('product__name', ))
watson.register(self.get_model('App_Analysis'))
+ watson.register(self.get_model('Vulnerability_Id'), store=('finding__test__engagement__product__name', ))
# YourModel = self.get_model("YourModel")
# watson.register(YourModel)
register_check(check_configuration_deduplication, 'dojo')
+ # Load any signals here so they are registered and ready at runtime
+ # Importing the signals file is good enough if using the receiver decorator
+ import dojo.announcement.signals # noqa
+ import dojo.product.signals # noqa
+ import dojo.test.signals # noqa
+ import dojo.sla_config.helpers # noqa
+
def get_model_fields_with_extra(model, extra_fields=()):
return get_model_fields(get_model_default_fields(model), extra_fields)
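[Editor's note] The "importing is good enough" comment above works because the @receiver decorator registers its handler at import time; AppConfig.ready() is simply a safe place to trigger that import. A hedged sketch of what one of those signals modules plausibly contains (the handler name and body are hypothetical; Announcement is a real model from this diff):

    from django.db.models.signals import post_save
    from django.dispatch import receiver

    from dojo.models import Announcement

    @receiver(post_save, sender=Announcement)
    def on_announcement_saved(sender, instance, created, **kwargs):
        # Registered as a side effect of the import in ready();
        # nothing else needs to reference this function.
        ...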
diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py
index 7ddaed0d041..69f3884a4ce 100644
--- a/dojo/authorization/authorization.py
+++ b/dojo/authorization/authorization.py
@@ -1,139 +1,248 @@
from django.core.exceptions import PermissionDenied
-from django.conf import settings
from dojo.request_cache import cache_for_request
-from dojo.authorization.roles_permissions import Permissions, Roles, get_global_roles_with_permissions, get_roles_with_permissions
-from dojo.models import Product_Type, Product_Type_Member, Product, Product_Member, Engagement, \
- Test, Finding, Endpoint, Finding_Group, Product_Group, Product_Type_Group, Dojo_Group, Dojo_Group_Member, \
- Languages, App_Analysis, Stub_Finding, Product_API_Scan_Configuration
-
-
-def user_has_configuration_permission(user, permission, legacy=None):
-
+from dojo.authorization.roles_permissions import (
+ Permissions,
+ Roles,
+ get_global_roles_with_permissions,
+ get_roles_with_permissions,
+)
+from dojo.models import (
+ Product_Type,
+ Product_Type_Member,
+ Product,
+ Product_Member,
+ Engagement,
+ Test,
+ Finding,
+ Endpoint,
+ Finding_Group,
+ Product_Group,
+ Product_Type_Group,
+ Dojo_Group,
+ Dojo_Group_Member,
+ Languages,
+ App_Analysis,
+ Stub_Finding,
+ Product_API_Scan_Configuration,
+ Cred_Mapping,
+)
+
+
+def user_has_configuration_permission(user, permission):
if not user:
return False
- if settings.FEATURE_CONFIGURATION_AUTHORIZATION:
- return user.has_perm(permission)
- else:
- if legacy == 'staff':
- return user.is_staff
- elif legacy == 'superuser':
- return user.is_superuser
- else:
- raise Exception(f'{legacy} is not allowed for parameter legacy')
+ if user.is_anonymous:
+ return False
+
+ return user.has_perm(permission)
def user_has_permission(user, obj, permission):
+ if user.is_anonymous:
+ return False
if user.is_superuser:
return True
- if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
- return True
-
if isinstance(obj, Product_Type) or isinstance(obj, Product):
- # Global roles are only relevant for product types, products and their dependent objects
+ # Global roles are only relevant for product types, products and their
+ # dependent objects
if user_has_global_permission(user, permission):
return True
if isinstance(obj, Product_Type):
- # Check if the user has a role for the product type with the requested permissions
+ # Check if the user has a role for the product type with the requested
+ # permissions
member = get_product_type_member(user, obj)
- if member is not None and role_has_permission(member.role.id, permission):
+ if member is not None and role_has_permission(
+ member.role.id, permission
+ ):
return True
- # Check if the user is in a group with a role for the product type with the requested permissions
+ # Check if the user is in a group with a role for the product type with
+ # the requested permissions
for product_type_group in get_product_type_groups(user, obj):
if role_has_permission(product_type_group.role.id, permission):
return True
return False
- elif (isinstance(obj, Product) and
- permission.value >= Permissions.Product_View.value):
+ elif (
+ isinstance(obj, Product)
+ and permission.value >= Permissions.Product_View.value
+ ):
# Products inherit permissions of their product type
if user_has_permission(user, obj.prod_type, permission):
return True
- # Check if the user has a role for the product with the requested permissions
+ # Check if the user has a role for the product with the requested
+ # permissions
member = get_product_member(user, obj)
- if member is not None and role_has_permission(member.role.id, permission):
+ if member is not None and role_has_permission(
+ member.role.id, permission
+ ):
return True
- # Check if the user is in a group with a role for the product with the requested permissions
+ # Check if the user is in a group with a role for the product with the
+ # requested permissions
for product_group in get_product_groups(user, obj):
if role_has_permission(product_group.role.id, permission):
return True
return False
- elif isinstance(obj, Engagement) and permission in Permissions.get_engagement_permissions():
+ elif (
+ isinstance(obj, Engagement)
+ and permission in Permissions.get_engagement_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Test) and permission in Permissions.get_test_permissions():
+ elif (
+ isinstance(obj, Test)
+ and permission in Permissions.get_test_permissions()
+ ):
return user_has_permission(user, obj.engagement.product, permission)
- elif (isinstance(obj, Finding) or isinstance(obj, Stub_Finding)) and permission in Permissions.get_finding_permissions():
- return user_has_permission(user, obj.test.engagement.product, permission)
- elif isinstance(obj, Finding_Group) and permission in Permissions.get_finding_group_permissions():
- return user_has_permission(user, obj.test.engagement.product, permission)
- elif isinstance(obj, Endpoint) and permission in Permissions.get_endpoint_permissions():
+ elif (
+ isinstance(obj, Finding) or isinstance(obj, Stub_Finding)
+ ) and permission in Permissions.get_finding_permissions():
+ return user_has_permission(
+ user, obj.test.engagement.product, permission
+ )
+ elif (
+ isinstance(obj, Finding_Group)
+ and permission in Permissions.get_finding_group_permissions()
+ ):
+ return user_has_permission(
+ user, obj.test.engagement.product, permission
+ )
+ elif (
+ isinstance(obj, Endpoint)
+ and permission in Permissions.get_endpoint_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Languages) and permission in Permissions.get_language_permissions():
+ elif (
+ isinstance(obj, Languages)
+ and permission in Permissions.get_language_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, App_Analysis) and permission in Permissions.get_technology_permissions():
+ elif (
+ isinstance(obj, App_Analysis)
+ and permission in Permissions.get_technology_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Product_API_Scan_Configuration) and permission in Permissions.get_product_api_scan_configuration_permissions():
+ elif (
+ isinstance(obj, Product_API_Scan_Configuration)
+ and permission
+ in Permissions.get_product_api_scan_configuration_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Product_Type_Member) and permission in Permissions.get_product_type_member_permissions():
+ elif (
+ isinstance(obj, Product_Type_Member)
+ and permission in Permissions.get_product_type_member_permissions()
+ ):
if permission == Permissions.Product_Type_Member_Delete:
# Every member is allowed to remove himself
- return obj.user == user or user_has_permission(user, obj.product_type, permission)
+ return obj.user == user or user_has_permission(
+ user, obj.product_type, permission
+ )
else:
return user_has_permission(user, obj.product_type, permission)
- elif isinstance(obj, Product_Member) and permission in Permissions.get_product_member_permissions():
+ elif (
+ isinstance(obj, Product_Member)
+ and permission in Permissions.get_product_member_permissions()
+ ):
if permission == Permissions.Product_Member_Delete:
# Every member is allowed to remove himself
- return obj.user == user or user_has_permission(user, obj.product, permission)
+ return obj.user == user or user_has_permission(
+ user, obj.product, permission
+ )
else:
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Product_Type_Group) and permission in Permissions.get_product_type_group_permissions():
+ elif (
+ isinstance(obj, Product_Type_Group)
+ and permission in Permissions.get_product_type_group_permissions()
+ ):
return user_has_permission(user, obj.product_type, permission)
- elif isinstance(obj, Product_Group) and permission in Permissions.get_product_group_permissions():
+ elif (
+ isinstance(obj, Product_Group)
+ and permission in Permissions.get_product_group_permissions()
+ ):
return user_has_permission(user, obj.product, permission)
- elif isinstance(obj, Dojo_Group) and permission in Permissions.get_group_permissions():
- # Check if the user has a role for the group with the requested permissions
+ elif (
+ isinstance(obj, Dojo_Group)
+ and permission in Permissions.get_group_permissions()
+ ):
+ # Check if the user has a role for the group with the requested
+ # permissions
group_member = get_group_member(user, obj)
- return group_member is not None and role_has_permission(group_member.role.id, permission)
- elif isinstance(obj, Dojo_Group_Member) and permission in Permissions.get_group_member_permissions():
+ return group_member is not None and role_has_permission(
+ group_member.role.id, permission
+ )
+ elif (
+ isinstance(obj, Dojo_Group_Member)
+ and permission in Permissions.get_group_member_permissions()
+ ):
if permission == Permissions.Group_Member_Delete:
# Every user is allowed to remove himself
- return obj.user == user or user_has_permission(user, obj.group, permission)
+ return obj.user == user or user_has_permission(
+ user, obj.group, permission
+ )
else:
return user_has_permission(user, obj.group, permission)
+ elif (
+ isinstance(obj, Cred_Mapping)
+ and permission in Permissions.get_credential_permissions()
+ ):
+ if obj.product:
+ return user_has_permission(user, obj.product, permission)
+ if obj.engagement:
+ return user_has_permission(
+ user, obj.engagement.product, permission
+ )
+ if obj.test:
+ return user_has_permission(
+ user, obj.test.engagement.product, permission
+ )
+ if obj.finding:
+ return user_has_permission(
+ user, obj.finding.test.engagement.product, permission
+ )
else:
- raise NoAuthorizationImplementedError('No authorization implemented for class {} and permission {}'.
- format(type(obj).__name__, permission))
+ raise NoAuthorizationImplementedError(
+ f"No authorization implemented for class {type(obj).__name__} and permission {permission}"
+ )
def user_has_global_permission(user, permission):
-
if not user:
return False
- if user.is_superuser:
- return True
+ if user.is_anonymous:
+ return False
- if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
+ if user.is_superuser:
return True
- if user.is_staff and permission == Permissions.Product_Type_Add:
- return True
+ if permission == Permissions.Product_Type_Add:
+ if user_has_configuration_permission(user, "dojo.add_product_type"):
+ return True
- if hasattr(user, 'global_role') and user.global_role.role is not None and role_has_global_permission(user.global_role.role.id, permission):
+ if (
+ hasattr(user, "global_role")
+ and user.global_role.role is not None
+ and role_has_global_permission(user.global_role.role.id, permission)
+ ):
return True
for group in get_groups(user):
- if hasattr(group, 'global_role') and group.global_role.role is not None and role_has_global_permission(group.global_role.role.id, permission):
+ if (
+ hasattr(group, "global_role")
+ and group.global_role.role is not None
+ and role_has_global_permission(
+ group.global_role.role.id, permission
+ )
+ ):
return True
return False
-def user_has_configuration_permission_or_403(user, permission, legacy=None):
- if not user_has_configuration_permission(user, permission, legacy):
+def user_has_configuration_permission_or_403(user, permission):
+ if not user_has_configuration_permission(user, permission):
raise PermissionDenied()
@@ -149,7 +258,9 @@ def user_has_global_permission_or_403(user, permission):
def get_roles_for_permission(permission):
if not Permissions.has_value(permission):
- raise PermissionDoesNotExistError('Permission {} does not exist'.format(permission))
+ raise PermissionDoesNotExistError(
+ "Permission {} does not exist".format(permission)
+ )
roles_for_permissions = set()
roles = get_roles_with_permissions()
for role in roles:
@@ -163,7 +274,7 @@ def role_has_permission(role, permission):
if role is None:
return False
if not Roles.has_value(role):
- raise RoleDoesNotExistError('Role {} does not exist'.format(role))
+ raise RoleDoesNotExistError("Role {} does not exist".format(role))
roles = get_roles_with_permissions()
permissions = roles.get(role)
if not permissions:
@@ -175,7 +286,7 @@ def role_has_global_permission(role, permission):
if role is None:
return False
if not Roles.has_value(role):
- raise RoleDoesNotExistError('Role {} does not exist'.format(role))
+ raise RoleDoesNotExistError("Role {} does not exist".format(role))
roles = get_global_roles_with_permissions()
permissions = roles.get(role)
if permissions and permission in permissions:
@@ -205,7 +316,11 @@ def get_product_member(user, product):
@cache_for_request
def get_product_member_dict(user):
pm_dict = {}
- for product_member in Product_Member.objects.select_related('product').select_related('role').filter(user=user):
+ for product_member in (
+ Product_Member.objects.select_related("product")
+ .select_related("role")
+ .filter(user=user)
+ ):
pm_dict[product_member.product.id] = product_member
return pm_dict
@@ -217,7 +332,11 @@ def get_product_type_member(user, product_type):
@cache_for_request
def get_product_type_member_dict(user):
ptm_dict = {}
- for product_type_member in Product_Type_Member.objects.select_related('product_type').select_related('role').filter(user=user):
+ for product_type_member in (
+ Product_Type_Member.objects.select_related("product_type")
+ .select_related("role")
+ .filter(user=user)
+ ):
ptm_dict[product_type_member.product_type.id] = product_type_member
return ptm_dict
@@ -229,7 +348,11 @@ def get_product_groups(user, product):
@cache_for_request
def get_product_groups_dict(user):
pg_dict = {}
- for product_group in Product_Group.objects.select_related('product').select_related('role').filter(group__users=user):
+ for product_group in (
+ Product_Group.objects.select_related("product")
+ .select_related("role")
+ .filter(group__users=user)
+ ):
if pg_dict.get(product_group.product.id) is None:
pgu_list = []
else:
@@ -246,7 +369,11 @@ def get_product_type_groups(user, product_type):
@cache_for_request
def get_product_type_groups_dict(user):
pgt_dict = {}
- for product_type_group in Product_Type_Group.objects.select_related('product_type').select_related('role').filter(group__users=user):
+ for product_type_group in (
+ Product_Type_Group.objects.select_related("product_type")
+ .select_related("role")
+ .filter(group__users=user)
+ ):
if pgt_dict.get(product_type_group.product_type.id) is None:
pgtu_list = []
else:
@@ -258,7 +385,7 @@ def get_product_type_groups_dict(user):
@cache_for_request
def get_groups(user):
- return Dojo_Group.objects.select_related('global_role').filter(users=user)
+ return Dojo_Group.objects.select_related("global_role").filter(users=user)
def get_group_member(user, group):
@@ -268,6 +395,10 @@ def get_group_member(user, group):
@cache_for_request
def get_group_members_dict(user):
gu_dict = {}
- for group_member in Dojo_Group_Member.objects.select_related('group').select_related('role').filter(user=user):
+ for group_member in (
+ Dojo_Group_Member.objects.select_related("group")
+ .select_related("role")
+ .filter(user=user)
+ ):
gu_dict[group_member.group.id] = group_member
return gu_dict
diff --git a/dojo/authorization/authorization_decorators.py b/dojo/authorization/authorization_decorators.py
index 6ba8fa9fdb1..ec2c26aefdb 100644
--- a/dojo/authorization/authorization_decorators.py
+++ b/dojo/authorization/authorization_decorators.py
@@ -1,15 +1,20 @@
import functools
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
-from dojo.authorization.authorization import user_has_global_permission_or_403, user_has_permission_or_403, user_has_configuration_permission
+from dojo.authorization.authorization import (
+ user_has_global_permission_or_403,
+ user_has_permission_or_403,
+ user_has_configuration_permission,
+)
def user_is_authorized(model, permission, arg, lookup="pk", func=None):
- """Decorator for functions that ensures the user has permission on an object.
- """
+ """Decorator for functions that ensures the user has permission on an object."""
if func is None:
- return functools.partial(user_is_authorized, model, permission, arg, lookup)
+ return functools.partial(
+ user_is_authorized, model, permission, arg, lookup
+ )
@functools.wraps(func)
def _wrapped(request, *args, **kwargs):
@@ -33,8 +38,7 @@ def _wrapped(request, *args, **kwargs):
def user_has_global_permission(permission, func=None):
- """Decorator for functions that ensures the user has a (global) permission
- """
+ """Decorator for functions that ensures the user has a (global) permission"""
if func is None:
return functools.partial(user_has_global_permission, permission)
@@ -47,18 +51,17 @@ def _wrapped(request, *args, **kwargs):
return _wrapped
-def user_is_configuration_authorized(permission, legacy, func=None):
+def user_is_configuration_authorized(permission, func=None):
"""
Decorator for views that checks whether a user has a particular permission enabled.
"""
if func is None:
- return functools.partial(user_is_configuration_authorized, permission, legacy)
+ return functools.partial(user_is_configuration_authorized, permission)
@functools.wraps(func)
def _wrapped(request, *args, **kwargs):
-
- if not user_has_configuration_permission(request.user, permission, legacy):
+ if not user_has_configuration_permission(request.user, permission):
raise PermissionDenied
return func(request, *args, **kwargs)
diff --git a/dojo/authorization/roles_permissions.py b/dojo/authorization/roles_permissions.py
index 3c941e1afbc..779463258ff 100644
--- a/dojo/authorization/roles_permissions.py
+++ b/dojo/authorization/roles_permissions.py
@@ -19,7 +19,8 @@ def has_value(cls, value):
def django_enum(cls):
# decorator needed to enable enums in django templates
- # see https://stackoverflow.com/questions/35953132/how-to-access-enum-types-in-django-templates
+ # see
+ # https://stackoverflow.com/questions/35953132/how-to-access-enum-types-in-django-templates
cls.do_not_call_in_templates = True
return cls
@@ -119,6 +120,11 @@ class Permissions(IntEnum):
Product_Tracking_Files_Edit = 2606
Product_Tracking_Files_Delete = 2607
+ Credential_View = 2702
+ Credential_Add = 2703
+ Credential_Edit = 2706
+ Credential_Delete = 2707
+
@classmethod
def has_value(cls, value):
try:
@@ -129,75 +135,147 @@ def has_value(cls, value):
@classmethod
def get_engagement_permissions(cls):
- return {Permissions.Engagement_View, Permissions.Engagement_Edit,
- Permissions.Engagement_Delete, Permissions.Risk_Acceptance,
- Permissions.Test_Add, Permissions.Import_Scan_Result, Permissions.Note_Add,
- Permissions.Note_Delete, Permissions.Note_Edit, Permissions.Note_View_History} \
- .union(cls.get_test_permissions())
+ return {
+ Permissions.Engagement_View,
+ Permissions.Engagement_Edit,
+ Permissions.Engagement_Delete,
+ Permissions.Risk_Acceptance,
+ Permissions.Test_Add,
+ Permissions.Import_Scan_Result,
+ Permissions.Note_Add,
+ Permissions.Note_Delete,
+ Permissions.Note_Edit,
+ Permissions.Note_View_History,
+ }.union(cls.get_test_permissions())
@classmethod
def get_test_permissions(cls):
- return {Permissions.Test_View, Permissions.Test_Edit, Permissions.Test_Delete,
- Permissions.Finding_Add, Permissions.Import_Scan_Result, Permissions.Note_Add,
- Permissions.Note_Delete, Permissions.Note_Edit, Permissions.Note_View_History} \
- .union(cls.get_finding_permissions())
+ return {
+ Permissions.Test_View,
+ Permissions.Test_Edit,
+ Permissions.Test_Delete,
+ Permissions.Finding_Add,
+ Permissions.Import_Scan_Result,
+ Permissions.Note_Add,
+ Permissions.Note_Delete,
+ Permissions.Note_Edit,
+ Permissions.Note_View_History,
+ }.union(cls.get_finding_permissions())
@classmethod
def get_finding_permissions(cls):
- return {Permissions.Finding_View, Permissions.Finding_Edit, Permissions.Import_Scan_Result,
- Permissions.Finding_Delete, Permissions.Risk_Acceptance, Permissions.Note_Add,
- Permissions.Note_Delete, Permissions.Note_Edit, Permissions.Note_View_History} \
- .union(cls.get_finding_group_permissions())
+ return {
+ Permissions.Finding_View,
+ Permissions.Finding_Edit,
+ Permissions.Finding_Add,
+ Permissions.Import_Scan_Result,
+ Permissions.Finding_Delete,
+ Permissions.Note_Add,
+ Permissions.Risk_Acceptance,
+ Permissions.Note_Delete,
+ Permissions.Note_Edit,
+ Permissions.Note_View_History,
+ }.union(cls.get_finding_group_permissions())
@classmethod
def get_finding_group_permissions(cls):
- return {Permissions.Finding_Group_View, Permissions.Finding_Group_Edit,
- Permissions.Finding_Group_Delete}
+ return {
+ Permissions.Finding_Group_View,
+ Permissions.Finding_Group_Edit,
+ Permissions.Finding_Group_Delete,
+ }
@classmethod
def get_endpoint_permissions(cls):
- return {Permissions.Endpoint_View, Permissions.Endpoint_Edit, Permissions.Endpoint_Delete}
+ return {
+ Permissions.Endpoint_View,
+ Permissions.Endpoint_Edit,
+ Permissions.Endpoint_Delete,
+ }
@classmethod
def get_product_member_permissions(cls):
- return {Permissions.Product_View, Permissions.Product_Manage_Members,
- Permissions.Product_Member_Delete}
+ return {
+ Permissions.Product_View,
+ Permissions.Product_Manage_Members,
+ Permissions.Product_Member_Delete,
+ }
@classmethod
def get_product_type_member_permissions(cls):
- return {Permissions.Product_Type_View, Permissions.Product_Type_Manage_Members,
- Permissions.Product_Type_Member_Delete}
+ return {
+ Permissions.Product_Type_View,
+ Permissions.Product_Type_Manage_Members,
+ Permissions.Product_Type_Member_Delete,
+ }
@classmethod
def get_product_group_permissions(cls):
- return {Permissions.Product_Group_View, Permissions.Product_Group_Edit,
- Permissions.Product_Group_Delete}
+ return {
+ Permissions.Product_Group_View,
+ Permissions.Product_Group_Edit,
+ Permissions.Product_Group_Delete,
+ }
@classmethod
def get_product_type_group_permissions(cls):
- return {Permissions.Product_Type_Group_View, Permissions.Product_Type_Group_Edit,
- Permissions.Product_Type_Group_Delete}
+ return {
+ Permissions.Product_Type_Group_View,
+ Permissions.Product_Type_Group_Edit,
+ Permissions.Product_Type_Group_Delete,
+ }
@classmethod
def get_group_permissions(cls):
- return {Permissions.Group_View, Permissions.Group_Member_Delete, Permissions.Group_Manage_Members,
- Permissions.Group_Add_Owner, Permissions.Group_Edit, Permissions.Group_Delete}
+ return {
+ Permissions.Group_View,
+ Permissions.Group_Member_Delete,
+ Permissions.Group_Manage_Members,
+ Permissions.Group_Add_Owner,
+ Permissions.Group_Edit,
+ Permissions.Group_Delete,
+ }
@classmethod
def get_group_member_permissions(cls):
- return {Permissions.Group_View, Permissions.Group_Manage_Members, Permissions.Group_Member_Delete}
+ return {
+ Permissions.Group_View,
+ Permissions.Group_Manage_Members,
+ Permissions.Group_Member_Delete,
+ }
@classmethod
def get_language_permissions(cls):
- return {Permissions.Language_View, Permissions.Language_Edit, Permissions.Language_Delete}
+ return {
+ Permissions.Language_View,
+ Permissions.Language_Edit,
+ Permissions.Language_Delete,
+ }
@classmethod
def get_technology_permissions(cls):
- return {Permissions.Technology_View, Permissions.Technology_Edit, Permissions.Technology_Delete}
+ return {
+ Permissions.Technology_View,
+ Permissions.Technology_Edit,
+ Permissions.Technology_Delete,
+ }
@classmethod
def get_product_api_scan_configuration_permissions(cls):
- return {Permissions.Product_API_Scan_Configuration_View, Permissions.Product_API_Scan_Configuration_Edit, Permissions.Product_API_Scan_Configuration_Delete}
+ return {
+ Permissions.Product_API_Scan_Configuration_View,
+ Permissions.Product_API_Scan_Configuration_Edit,
+ Permissions.Product_API_Scan_Configuration_Delete,
+ }
+
+ @classmethod
+ def get_credential_permissions(cls):
+ return {
+ Permissions.Credential_View,
+ Permissions.Credential_Add,
+ Permissions.Credential_Edit,
+ Permissions.Credential_Delete,
+ }
def get_roles_with_permissions():
@@ -219,11 +297,13 @@ def get_roles_with_permissions():
Permissions.Technology_View,
Permissions.Product_API_Scan_Configuration_View,
Permissions.Product_Tracking_Files_View,
+ Permissions.Credential_View,
},
Roles.API_Importer: {
Permissions.Product_Type_View,
Permissions.Product_View,
Permissions.Engagement_View,
+ Permissions.Engagement_Add,
Permissions.Engagement_Edit,
Permissions.Test_View,
Permissions.Test_Edit,
@@ -234,60 +314,50 @@ def get_roles_with_permissions():
Permissions.Product_Group_View,
Permissions.Product_Type_Group_View,
Permissions.Technology_View,
- Permissions.Import_Scan_Result
+ Permissions.Import_Scan_Result,
+ Permissions.Credential_View,
},
Roles.Writer: {
Permissions.Product_Type_View,
-
Permissions.Product_View,
-
Permissions.Engagement_View,
Permissions.Engagement_Add,
Permissions.Engagement_Edit,
Permissions.Risk_Acceptance,
-
Permissions.Test_View,
Permissions.Test_Add,
Permissions.Test_Edit,
-
Permissions.Finding_View,
Permissions.Finding_Add,
Permissions.Import_Scan_Result,
Permissions.Finding_Edit,
-
Permissions.Finding_Group_View,
Permissions.Finding_Group_Add,
Permissions.Finding_Group_Edit,
Permissions.Finding_Group_Delete,
-
Permissions.Endpoint_View,
Permissions.Endpoint_Add,
Permissions.Endpoint_Edit,
-
Permissions.Benchmark_Edit,
-
Permissions.Component_View,
-
Permissions.Note_View_History,
Permissions.Note_Edit,
Permissions.Note_Add,
-
Permissions.Product_Group_View,
Permissions.Product_Type_Group_View,
Permissions.Group_View,
-
Permissions.Language_View,
Permissions.Language_Add,
Permissions.Language_Edit,
Permissions.Language_Delete,
-
Permissions.Technology_View,
Permissions.Technology_Add,
Permissions.Technology_Edit,
-
Permissions.Product_API_Scan_Configuration_View,
-
Permissions.Product_Tracking_Files_View,
+ Permissions.Credential_View,
+ Permissions.Credential_Add,
+ Permissions.Credential_Edit,
},
Roles.Maintainer: {
Permissions.Product_Type_Add_Product,
@@ -295,84 +365,72 @@ def get_roles_with_permissions():
Permissions.Product_Type_Member_Delete,
Permissions.Product_Type_Manage_Members,
Permissions.Product_Type_Edit,
-
Permissions.Product_View,
Permissions.Product_Member_Delete,
Permissions.Product_Manage_Members,
Permissions.Product_Configure_Notifications,
Permissions.Product_Edit,
-
Permissions.Engagement_View,
Permissions.Engagement_Add,
Permissions.Engagement_Edit,
Permissions.Engagement_Delete,
Permissions.Risk_Acceptance,
-
Permissions.Test_View,
Permissions.Test_Add,
Permissions.Test_Edit,
Permissions.Test_Delete,
-
Permissions.Finding_View,
Permissions.Finding_Add,
Permissions.Import_Scan_Result,
Permissions.Finding_Edit,
Permissions.Finding_Delete,
-
Permissions.Finding_Group_View,
Permissions.Finding_Group_Add,
Permissions.Finding_Group_Edit,
Permissions.Finding_Group_Delete,
-
Permissions.Endpoint_View,
Permissions.Endpoint_Add,
Permissions.Endpoint_Edit,
Permissions.Endpoint_Delete,
-
Permissions.Benchmark_Edit,
Permissions.Benchmark_Delete,
-
Permissions.Component_View,
-
Permissions.Note_View_History,
Permissions.Note_Edit,
Permissions.Note_Add,
Permissions.Note_Delete,
-
Permissions.Product_Group_View,
Permissions.Product_Group_Add,
Permissions.Product_Group_Edit,
Permissions.Product_Group_Delete,
-
Permissions.Product_Type_Group_View,
Permissions.Product_Type_Group_Add,
Permissions.Product_Type_Group_Edit,
Permissions.Product_Type_Group_Delete,
-
Permissions.Group_View,
Permissions.Group_Edit,
Permissions.Group_Manage_Members,
Permissions.Group_Member_Delete,
-
Permissions.Language_View,
Permissions.Language_Add,
Permissions.Language_Edit,
Permissions.Language_Delete,
-
Permissions.Technology_View,
Permissions.Technology_Add,
Permissions.Technology_Edit,
Permissions.Technology_Delete,
-
Permissions.Product_API_Scan_Configuration_View,
Permissions.Product_API_Scan_Configuration_Add,
Permissions.Product_API_Scan_Configuration_Edit,
Permissions.Product_API_Scan_Configuration_Delete,
-
Permissions.Product_Tracking_Files_View,
Permissions.Product_Tracking_Files_Add,
Permissions.Product_Tracking_Files_Edit,
Permissions.Product_Tracking_Files_Delete,
+ Permissions.Credential_View,
+ Permissions.Credential_Add,
+ Permissions.Credential_Edit,
+ Permissions.Credential_Delete,
},
Roles.Owner: {
Permissions.Product_Type_Add_Product,
@@ -382,7 +440,6 @@ def get_roles_with_permissions():
Permissions.Product_Type_Member_Add_Owner,
Permissions.Product_Type_Edit,
Permissions.Product_Type_Delete,
-
Permissions.Product_View,
Permissions.Product_Member_Delete,
Permissions.Product_Manage_Members,
@@ -390,83 +447,72 @@ def get_roles_with_permissions():
Permissions.Product_Configure_Notifications,
Permissions.Product_Edit,
Permissions.Product_Delete,
-
Permissions.Engagement_View,
Permissions.Engagement_Add,
Permissions.Engagement_Edit,
Permissions.Engagement_Delete,
Permissions.Risk_Acceptance,
-
Permissions.Test_View,
Permissions.Test_Add,
Permissions.Test_Edit,
Permissions.Test_Delete,
-
Permissions.Finding_View,
Permissions.Finding_Add,
Permissions.Import_Scan_Result,
Permissions.Finding_Edit,
Permissions.Finding_Delete,
-
Permissions.Finding_Group_View,
Permissions.Finding_Group_Add,
Permissions.Finding_Group_Edit,
Permissions.Finding_Group_Delete,
-
Permissions.Endpoint_View,
Permissions.Endpoint_Add,
Permissions.Endpoint_Edit,
Permissions.Endpoint_Delete,
-
Permissions.Benchmark_Edit,
Permissions.Benchmark_Delete,
-
Permissions.Component_View,
-
Permissions.Note_View_History,
Permissions.Note_Edit,
Permissions.Note_Add,
Permissions.Note_Delete,
-
Permissions.Product_Group_View,
Permissions.Product_Group_Add,
Permissions.Product_Group_Add_Owner,
Permissions.Product_Group_Edit,
Permissions.Product_Group_Delete,
-
Permissions.Product_Type_Group_View,
Permissions.Product_Type_Group_Add,
Permissions.Product_Type_Group_Add_Owner,
Permissions.Product_Type_Group_Edit,
Permissions.Product_Type_Group_Delete,
-
Permissions.Group_View,
Permissions.Group_Edit,
Permissions.Group_Manage_Members,
Permissions.Group_Member_Delete,
Permissions.Group_Add_Owner,
Permissions.Group_Delete,
-
Permissions.Language_View,
Permissions.Language_Add,
Permissions.Language_Edit,
Permissions.Language_Delete,
-
Permissions.Technology_View,
Permissions.Technology_Add,
Permissions.Technology_Edit,
Permissions.Technology_Delete,
-
Permissions.Product_API_Scan_Configuration_View,
Permissions.Product_API_Scan_Configuration_Add,
Permissions.Product_API_Scan_Configuration_Edit,
Permissions.Product_API_Scan_Configuration_Delete,
-
Permissions.Product_Tracking_Files_View,
Permissions.Product_Tracking_Files_Add,
Permissions.Product_Tracking_Files_Edit,
Permissions.Product_Tracking_Files_Delete,
- }
+ Permissions.Credential_View,
+ Permissions.Credential_Add,
+ Permissions.Credential_Edit,
+ Permissions.Credential_Delete,
+ },
}
@@ -475,10 +521,6 @@ def get_global_roles_with_permissions():
Extra permissions for global roles, on top of the permissions granted to the "normal" roles above.
"""
return {
- Roles.Maintainer: {
- Permissions.Product_Type_Add
- },
- Roles.Owner: {
- Permissions.Product_Type_Add
- }
+ Roles.Maintainer: {Permissions.Product_Type_Add},
+ Roles.Owner: {Permissions.Product_Type_Add},
}
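[Editor's note] Because each get_*_permissions helper unions in the next level down, the sets nest: finding permissions are contained in test permissions, which are contained in engagement permissions. A quick sanity check one could run against this module as patched:

    from dojo.authorization.roles_permissions import Permissions

    assert Permissions.get_finding_permissions() <= Permissions.get_test_permissions()
    assert Permissions.get_test_permissions() <= Permissions.get_engagement_permissions()
    assert Permissions.Credential_View in Permissions.get_credential_permissions()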
diff --git a/dojo/banner/urls.py b/dojo/banner/urls.py
index 557fb8aef26..4b99585db94 100644
--- a/dojo/banner/urls.py
+++ b/dojo/banner/urls.py
@@ -1,7 +1,8 @@
-from django.conf.urls import url
+from django.urls import re_path
from dojo.banner import views
urlpatterns = [
- url(r'^configure_banner$', views.configure_banner,
- name='configure_banner'),
+ re_path(
+ r"^configure_banner$", views.configure_banner, name="configure_banner"
+ ),
]
diff --git a/dojo/banner/views.py b/dojo/banner/views.py
index a1f398a2b4e..bfd46160621 100644
--- a/dojo/banner/views.py
+++ b/dojo/banner/views.py
@@ -1,6 +1,5 @@
import logging
-from django.contrib.auth.decorators import user_passes_test
from django.shortcuts import render, get_object_or_404
from django.contrib import messages
from django.urls import reverse
@@ -9,35 +8,43 @@
from dojo.forms import LoginBanner
from dojo.models import BannerConf
+from dojo.authorization.authorization_decorators import (
+ user_is_configuration_authorized,
+)
logger = logging.getLogger(__name__)
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_configuration_authorized("dojo.change_bannerconf")
def configure_banner(request):
banner_config = get_object_or_404(BannerConf, id=1)
- if request.method == 'POST':
+ if request.method == "POST":
form = LoginBanner(request.POST)
if form.is_valid():
- banner_config.banner_enable = form.cleaned_data['banner_enable']
- banner_config.banner_message = form.cleaned_data['banner_message']
+ banner_config.banner_enable = form.cleaned_data["banner_enable"]
+ banner_config.banner_message = form.cleaned_data["banner_message"]
banner_config.save()
messages.add_message(
request,
messages.SUCCESS,
- 'Banner updated successfully.',
+ "Banner updated successfully.",
extra_tags="alert-success",
)
return HttpResponseRedirect(reverse("configure_banner"))
else:
# List the banner configuration
- form = LoginBanner(initial={
- 'banner_enable': banner_config.banner_enable,
- 'banner_message': banner_config.banner_message
- })
+ form = LoginBanner(
+ initial={
+ "banner_enable": banner_config.banner_enable,
+ "banner_message": banner_config.banner_message,
+ }
+ )
- add_breadcrumb(title="Banner Configuration", top_level=True, request=request)
- return render(request, 'dojo/banner.html', {
- 'form': form,
- 'banner_message': banner_config.banner_message
- })
+ add_breadcrumb(
+ title="Banner Configuration", top_level=True, request=request
+ )
+ return render(
+ request,
+ "dojo/banner.html",
+ {"form": form, "banner_message": banner_config.banner_message},
+ )
diff --git a/dojo/benchmark/urls.py b/dojo/benchmark/urls.py
index 147c3f66982..f30e3fa8c85 100644
--- a/dojo/benchmark/urls.py
+++ b/dojo/benchmark/urls.py
@@ -1,9 +1,35 @@
-from django.conf.urls import url
+from django.urls import re_path
from . import views
urlpatterns = [
- url(r'^benchmark/(?P<pid>\d+)/type/(?P<type>\d+)$', views.benchmark_view, name='view_product_benchmark'),
- url(r'^benchmark/(?P<pid>\d+)/type/(?P<type>\d+)/category/(?P<cat>\d+)$', views.benchmark_view, name='view_product_benchmark'),
- url(r'^benchmark/(?P<pid>\d+)/type/(?P<type>\d+)/category/(?P<cat>\d+)/edit/(?P<bid>\d+)$', views.benchmark_view, name='edit_benchmark'),
- url(r'^benchmark/(?P<pid>\d+)/type/(?P<type>\d+)/delete$', views.delete, name='delete_product_benchmark')
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P\d+)$",
+ views.benchmark_view,
+ name="view_product_benchmark",
+ ),
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P\d+)/category/(?P\d+)$",
+ views.benchmark_view,
+ name="view_product_benchmark",
+ ),
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P\d+)/category/(?P\d+)/edit/(?P\d+)$",
+ views.benchmark_view,
+ name="edit_benchmark",
+ ),
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P\d+)/delete$",
+ views.delete,
+ name="delete_product_benchmark",
+ ),
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P<_type>\d+)/update$",
+ views.update_benchmark,
+ name="update_product_benchmark",
+ ),
+ re_path(
+ r"^benchmark/(?P\d+)/type/(?P<_type>\d+)/summary/(?P\d+)/update$",
+ views.update_benchmark_summary,
+ name="update_product_benchmark_summary",
+ ),
]
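[Editor's note] The named groups in these patterns become keyword arguments on the views (update_benchmark(request, pid, _type), update_benchmark_summary(request, pid, _type, summary)). Plain re shows the mechanics django.urls.re_path relies on:

    import re

    pattern = re.compile(r"^benchmark/(?P<pid>\d+)/type/(?P<_type>\d+)/update$")
    match = pattern.match("benchmark/7/type/2/update")
    assert match is not None
    assert match.groupdict() == {"pid": "7", "_type": "2"}
    # Django passes this groupdict as kwargs: view(request, pid="7", _type="2")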
diff --git a/dojo/benchmark/views.py b/dojo/benchmark/views.py
index 16a93181212..86961e66fe3 100644
--- a/dojo/benchmark/views.py
+++ b/dojo/benchmark/views.py
@@ -1,15 +1,30 @@
import logging
from django.contrib import messages
from django.urls import reverse
-from django.http import HttpResponseRedirect
+from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import render, get_object_or_404
-from django.forms import modelformset_factory
from django.db.models import Count, Q
+from django.utils.translation import gettext as _
+
from dojo.forms import Benchmark_Product_SummaryForm, DeleteBenchmarkForm
-from dojo.models import Benchmark_Type, Benchmark_Category, Benchmark_Requirement, Benchmark_Product, Product, Benchmark_Product_Summary
-from dojo.utils import add_breadcrumb, Product_Tab
+from dojo.models import (
+ Benchmark_Type,
+ Benchmark_Category,
+ Benchmark_Requirement,
+ Benchmark_Product,
+ Product,
+ Benchmark_Product_Summary,
+)
+from dojo.utils import (
+ add_breadcrumb,
+ Product_Tab,
+ redirect_to_return_url_or_else,
+)
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.authorization.roles_permissions import Permissions
+from dojo.templatetags.display_tags import asvs_level
+
+from crum import get_current_user
logger = logging.getLogger(__name__)
@@ -24,34 +39,148 @@ def add_benchmark(queryset, product):
try:
Benchmark_Product.objects.bulk_create(requirements)
- except:
+ except Exception:
pass
+def update_benchmark(request, pid, _type):
+ if request.method == "POST":
+ bench_id = request.POST.get("bench_id")
+ field = request.POST.get("field")
+ value = request.POST.get("value")
+ value = {"true": True, "false": False}.get(value, value)
+
+ if field in [
+ "enabled",
+ "pass_fail",
+ "notes",
+ "get_notes",
+ "delete_notes",
+ ]:
+ bench = Benchmark_Product.objects.get(id=bench_id)
+ if field == "enabled":
+ bench.enabled = value
+ elif field == "pass_fail":
+ bench.pass_fail = value
+ elif field in ["notes", "get_notes", "delete_notes"]:
+ if field == "notes":
+ bench.notes.create(entry=value, author=get_current_user())
+ if field == "delete_notes":
+ bench.notes.remove(value)
+ notes = bench.notes.order_by("id")
+ return JsonResponse(
+ {
+ "notes": [
+ {
+ "id": n.id,
+ "entry": n.entry,
+ "author": n.author.get_full_name(),
+ "date": n.date.ctime(),
+ }
+ for n in notes
+ ]
+ }
+ )
+
+ bench.save()
+ return JsonResponse({field: value})
+
+ return redirect_to_return_url_or_else(
+ request, reverse("view_product_benchmark", args=(pid, _type))
+ )
+
+
+def update_benchmark_summary(request, pid, _type, summary):
+ if request.method == "POST":
+ field = request.POST.get("field")
+ value = request.POST.get("value")
+ value = {"true": True, "false": False}.get(value, value)
+
+ if field in ["publish", "desired_level"]:
+ summary = Benchmark_Product_Summary.objects.get(id=summary)
+ data = {}
+ if field == "publish":
+ summary.publish = value
+ data = {"publish": value}
+ elif field == "desired_level":
+ summary.desired_level = value
+ data = {"desired_level": value, "text": asvs_level(summary)}
+
+ summary.save()
+ return JsonResponse(data)
+
+ return redirect_to_return_url_or_else(
+ request, reverse("view_product_benchmark", args=(pid, _type))
+ )
+
+
def return_score(queryset):
asvs_level_1_benchmark = 0
asvs_level_1_score = 0
for item in queryset:
if item["pass_fail"]:
asvs_level_1_score = item["pass_fail__count"]
- asvs_level_1_benchmark = asvs_level_1_benchmark + item["pass_fail__count"]
+ asvs_level_1_benchmark = (
+ asvs_level_1_benchmark + item["pass_fail__count"]
+ )
return asvs_level_1_benchmark, asvs_level_1_score
def score_asvs(product, benchmark_type):
# Compliant to ASVS level 1 benchmarks
- asvs_level_1 = Benchmark_Product.objects.filter(enabled=True, control__enabled=True, product=product, control__category__type=benchmark_type, control__category__enabled=True, control__level_1=True).values('pass_fail').annotate(Count('pass_fail')).order_by()
+ asvs_level_1 = (
+ Benchmark_Product.objects.filter(
+ enabled=True,
+ control__enabled=True,
+ product=product,
+ control__category__type=benchmark_type,
+ control__category__enabled=True,
+ control__level_1=True,
+ )
+ .values("pass_fail")
+ .annotate(Count("pass_fail"))
+ .order_by()
+ )
asvs_level_1_benchmark, asvs_level_1_score = return_score(asvs_level_1)
# Compliant to ASVS level 2 benchmarks
- asvs_level_2 = Benchmark_Product.objects.filter(~Q(control__level_1=True), enabled=True, control__enabled=True, product=product, control__category__type=benchmark_type, control__category__enabled=True, control__level_2=True).values('pass_fail').annotate(Count('pass_fail')).order_by()
+ asvs_level_2 = (
+ Benchmark_Product.objects.filter(
+ ~Q(control__level_1=True),
+ enabled=True,
+ control__enabled=True,
+ product=product,
+ control__category__type=benchmark_type,
+ control__category__enabled=True,
+ control__level_2=True,
+ )
+ .values("pass_fail")
+ .annotate(Count("pass_fail"))
+ .order_by()
+ )
asvs_level_2_benchmark, asvs_level_2_score = return_score(asvs_level_2)
# Compliant to ASVS level 3 benchmarks
- asvs_level_3 = Benchmark_Product.objects.filter(~Q(control__level_1=True), ~Q(control__level_2=True), enabled=True, control__enabled=True, control__category__enabled=True, product=product, control__category__type=benchmark_type, control__level_3=True).values('pass_fail').annotate(Count('pass_fail')).order_by()
+ asvs_level_3 = (
+ Benchmark_Product.objects.filter(
+ ~Q(control__level_1=True),
+ ~Q(control__level_2=True),
+ enabled=True,
+ control__enabled=True,
+ control__category__enabled=True,
+ product=product,
+ control__category__type=benchmark_type,
+ control__level_3=True,
+ )
+ .values("pass_fail")
+ .annotate(Count("pass_fail"))
+ .order_by()
+ )
asvs_level_3_benchmark, asvs_level_3_score = return_score(asvs_level_3)
- benchmark_product_summary = Benchmark_Product_Summary.objects.get(product=product, benchmark_type=benchmark_type)
+ benchmark_product_summary = Benchmark_Product_Summary.objects.get(
+ product=product, benchmark_type=benchmark_type
+ )
benchmark_product_summary.asvs_level_1_benchmark = asvs_level_1_benchmark
benchmark_product_summary.asvs_level_1_score = asvs_level_1_score
@@ -63,100 +192,140 @@ def score_asvs(product, benchmark_type):
benchmark_product_summary.save()
-@user_is_authorized(Product, Permissions.Benchmark_Edit, 'pid')
+@user_is_authorized(Product, Permissions.Benchmark_Edit, "pid")
def benchmark_view(request, pid, type, cat=None):
product = get_object_or_404(Product, id=pid)
benchmark_type = get_object_or_404(Benchmark_Type, id=type)
- benchmark_category = Benchmark_Category.objects.filter(type=type, enabled=True).order_by('name')
- category_name = ""
+ benchmark_category = Benchmark_Category.objects.filter(
+ type=type, enabled=True
+ ).order_by("name")
# Add requirements to the product
- add_benchmark(Benchmark_Requirement.objects.filter(category__type=type, category__type__enabled=True, enabled=True).all(), product)
-
- if cat:
- category_name = Benchmark_Category.objects.get(id=cat, enabled=True).name
+ new_benchmarks = Benchmark_Requirement.objects.filter(
+ category__type=type, category__type__enabled=True, enabled=True
+ ).exclude(
+ id__in=Benchmark_Product.objects.filter(product=product).values_list(
+ "control_id", flat=True
+ )
+ )
+ add_benchmark(new_benchmarks, product)
# Create the benchmark summary category
try:
- benchmark_product_summary = Benchmark_Product_Summary.objects.get(product=product, benchmark_type=benchmark_type)
- except:
- pass
- benchmark_product_summary = Benchmark_Product_Summary(product=product, benchmark_type=benchmark_type)
+ benchmark_product_summary = Benchmark_Product_Summary.objects.get(
+ product=product, benchmark_type=benchmark_type
+ )
+ except Exception:
+ benchmark_product_summary = Benchmark_Product_Summary(
+ product=product, benchmark_type=benchmark_type
+ )
benchmark_product_summary.save()
- # Insert any new benchmarks since last created
- new_benchmarks = Benchmark_Requirement.objects.filter(category__type=type, category__type__enabled=True, enabled=True).exclude(id__in=Benchmark_Product.objects.filter(product=product).values_list('control_id', flat=True))
- add_benchmark(new_benchmarks, product)
-
- Benchmark_ProductFormSet = modelformset_factory(Benchmark_Product, exclude=['product, control'], extra=0)
-
- if request.method == 'POST':
- form = Benchmark_ProductFormSet(request.POST)
- summary_form = Benchmark_Product_SummaryForm(request.POST, instance=benchmark_product_summary)
-
- if form.is_valid():
- # print summary_form.errors
- summary_form_save = summary_form.save()
- form_save = form.save()
- score_asvs(product, benchmark_type)
- benchmark_product_summary = Benchmark_Product_Summary.objects.get(product=product, benchmark_type=benchmark_type)
-
- messages.add_message(request,
- messages.SUCCESS,
- 'Benchmarks saved.',
- extra_tags='alert-success')
-
- add_breadcrumb(title="Benchmarks", top_level=False, request=request)
-
if cat:
- benchmarks = Benchmark_Product.objects.filter(product=product.id, control__category=cat, control__category__enabled=True, control__category__type=type, control__enabled=True).all().order_by('control__objective_number')
-
- benchmark_formset = Benchmark_ProductFormSet(queryset=Benchmark_Product.objects.filter(product=product.id, control__category=cat, control__category__enabled=True, control__category__type=type, control__enabled=True).all().order_by('control__objective_number'))
+ benchmarks = (
+ Benchmark_Product.objects.select_related(
+ "control", "control__category"
+ )
+ .filter(
+ product=product.id,
+ control__category=cat,
+ control__category__enabled=True,
+ control__category__type=type,
+ control__enabled=True,
+ )
+ .all()
+ .order_by("control__objective_number")
+ )
else:
- benchmarks = Benchmark_Product.objects.filter(product=product.id, control__category__enabled=True, control__category__type=type, control__enabled=True).all().order_by('control__category__name', 'control__objective_number')
-
- benchmark_formset = Benchmark_ProductFormSet(queryset=Benchmark_Product.objects.filter(product=product.id, control__category__enabled=True, control__category__type=type, control__enabled=True).all().order_by('control__category__name', 'control__objective_number'))
-
- benchmark_summary_form = Benchmark_Product_SummaryForm(instance=benchmark_product_summary)
-
- product_tab = Product_Tab(pid, title="Benchmarks", tab="benchmarks")
-
- return render(request, 'dojo/benchmark.html',
- {'benchmarks': benchmarks,
- 'active_tab': 'benchmarks',
- 'product_tab': product_tab,
- 'benchmark_product_summary': benchmark_product_summary,
- 'benchmark_summary_form': benchmark_summary_form,
- 'benchmark_formset': benchmark_formset,
- 'benchmark_type': benchmark_type,
- 'product': product,
- 'category_name': category_name,
- 'benchmark_category': benchmark_category})
-
-
-@user_is_authorized(Product, Permissions.Benchmark_Delete, 'pid')
+ benchmarks = (
+ Benchmark_Product.objects.select_related(
+ "control", "control__category"
+ )
+ .filter(
+ product=product.id,
+ control__category__enabled=True,
+ control__category__type=type,
+ control__enabled=True,
+ )
+ .all()
+ .order_by("control__category__name", "control__objective_number")
+ )
+
+ benchmark_summary_form = Benchmark_Product_SummaryForm(
+ instance=benchmark_product_summary
+ )
+
+ noted_benchmarks = (
+ benchmarks.filter(notes__isnull=False).order_by("id").distinct()
+ )
+ for bench in benchmarks:
+ if bench.id in [b.id for b in noted_benchmarks]:
+ bench.noted = True
+ else:
+ bench.noted = False
+ benchmarks = sorted(
+ benchmarks,
+ key=lambda x: [int(_) for _ in x.control.objective_number.split(".")],
+ )
+ benchmark_category = sorted(
+ benchmark_category, key=lambda x: int(x.name[:3].strip("V: "))
+ )
+
+ product_tab = Product_Tab(product, title=_("Benchmarks"), tab="benchmarks")
+
+ add_breadcrumb(title=_("Benchmarks"), top_level=False, request=request)
+
+ return render(
+ request,
+ "dojo/benchmark.html",
+ {
+ "benchmarks": benchmarks,
+ "active_tab": "benchmarks",
+ "product_tab": product_tab,
+ "benchmark_product_summary": benchmark_product_summary,
+ "benchmark_summary_form": benchmark_summary_form,
+ "benchmark_type": benchmark_type,
+ "product": product,
+ "benchmark_category": benchmark_category,
+ },
+ )
+
+
+@user_is_authorized(Product, Permissions.Benchmark_Delete, "pid")
def delete(request, pid, type):
product = get_object_or_404(Product, id=pid)
- benchmark_type = get_object_or_404(Benchmark_Type, id=type)
- benchmark_product_summary = Benchmark_Product_Summary.objects.filter(product=product, benchmark_type=type).first()
+ benchmark_product_summary = Benchmark_Product_Summary.objects.filter(
+ product=product, benchmark_type=type
+ ).first()
form = DeleteBenchmarkForm(instance=benchmark_product_summary)
- if request.method == 'POST':
- if 'id' in request.POST and str(benchmark_product_summary.id) == request.POST['id']:
- form = DeleteBenchmarkForm(request.POST, instance=benchmark_product_summary)
+ if request.method == "POST":
+ if (
+ "id" in request.POST
+ and str(benchmark_product_summary.id) == request.POST["id"]
+ ):
+ form = DeleteBenchmarkForm(
+ request.POST, instance=benchmark_product_summary
+ )
if form.is_valid():
- benchmark_product = Benchmark_Product.objects.filter(product=product, control__category__type=type)
+ benchmark_product = Benchmark_Product.objects.filter(
+ product=product, control__category__type=type
+ )
benchmark_product.delete()
benchmark_product_summary.delete()
- messages.add_message(request,
- messages.SUCCESS,
- 'Benchmarks removed.',
- extra_tags='alert-success')
- return HttpResponseRedirect(reverse('product'))
-
- product_tab = Product_Tab(pid, title="Delete Benchmarks", tab="benchmarks")
- return render(request, 'dojo/delete_benchmark.html',
- {'product': product,
- 'form': form,
- 'product_tab': product_tab
- })
+ messages.add_message(
+ request,
+ messages.SUCCESS,
+ _("Benchmarks removed."),
+ extra_tags="alert-success",
+ )
+ return HttpResponseRedirect(reverse("product"))
+
+ product_tab = Product_Tab(
+ product, title=_("Delete Benchmarks"), tab="benchmarks"
+ )
+ return render(
+ request,
+ "dojo/delete_benchmark.html",
+ {"product": product, "form": form, "product_tab": product_tab},
+ )
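
The rewritten view stops sorting objective numbers as strings and instead compares them segment by segment. A minimal, self-contained sketch of why that matters (the sample values are illustrative, not from the patch):

```python
# String sort compares character by character, so "1.10" lands before "1.2";
# the view's key function compares integer segments instead.
objectives = ["1.10", "1.2", "10.1", "2.1"]

assert sorted(objectives) == ["1.10", "1.2", "10.1", "2.1"]  # lexicographic

natural = sorted(objectives, key=lambda s: [int(part) for part in s.split(".")])
assert natural == ["1.2", "1.10", "2.1", "10.1"]  # intended ASVS order
```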
diff --git a/dojo/components/sql_group_concat.py b/dojo/components/sql_group_concat.py
index b86d417d12e..5aa8f10d645 100644
--- a/dojo/components/sql_group_concat.py
+++ b/dojo/components/sql_group_concat.py
@@ -2,26 +2,34 @@
class Sql_GroupConcat(Aggregate):
- function = 'GROUP_CONCAT'
+ function = "GROUP_CONCAT"
allow_distinct = True
- def __init__(self, expression, separator, distinct=False, ordering=None, **extra):
+ def __init__(
+ self, expression, separator, distinct=False, ordering=None, **extra
+ ):
self.separator = separator
- super(Sql_GroupConcat, self).__init__(expression,
- distinct='DISTINCT ' if distinct else '',
- ordering=' ORDER BY %s' % ordering if ordering is not None else '',
- separator=' SEPARATOR "%s"' % separator,
- output_field=CharField(),
- **extra)
+ super(Sql_GroupConcat, self).__init__(
+ expression,
+ distinct="DISTINCT " if distinct else "",
+ ordering=" ORDER BY %s" % ordering if ordering is not None else "",
+ separator=' SEPARATOR "%s"' % separator,
+ output_field=CharField(),
+ **extra
+ )
def as_mysql(self, compiler, connection):
- return super().as_sql(compiler,
- connection,
- template='%(function)s(%(distinct)s%(expressions)s%(ordering)s%(separator)s)',
- separator=' SEPARATOR \'%s\'' % self.separator)
+ return super().as_sql(
+ compiler,
+ connection,
+ template="%(function)s(%(distinct)s%(expressions)s%(ordering)s%(separator)s)",
+ separator=" SEPARATOR '%s'" % self.separator,
+ )
def as_sql(self, compiler, connection, **extra):
- return super().as_sql(compiler,
- connection,
- template='%(function)s(%(distinct)s%(expressions)s%(ordering)s)',
- **extra)
+ return super().as_sql(
+ compiler,
+ connection,
+ template="%(function)s(%(distinct)s%(expressions)s%(ordering)s)",
+ **extra
+ )
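
For reference, a usage sketch of the reformatted aggregate, mirroring the call site in dojo/components/views.py further down; on MySQL it compiles to GROUP_CONCAT(DISTINCT ... SEPARATOR ', '):

```python
from dojo.components.sql_group_concat import Sql_GroupConcat
from dojo.models import Finding

# One row per component_name, with all distinct versions joined into a string.
versions = (
    Finding.objects.values("component_name")
    .order_by("component_name")
    .annotate(
        component_version=Sql_GroupConcat(
            "component_version", separator=", ", distinct=True
        )
    )
)
```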
diff --git a/dojo/components/urls.py b/dojo/components/urls.py
index 116491f1921..0183e3e8079 100644
--- a/dojo/components/urls.py
+++ b/dojo/components/urls.py
@@ -1,7 +1,6 @@
-from django.conf.urls import url
+from django.urls import re_path
from dojo.components import views
urlpatterns = [
- url(r'^components$', views.components,
- name='components'),
+ re_path(r"^components$", views.components, name="components"),
]
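
The swap is mechanical: django.conf.urls.url was a deprecated alias of re_path and is removed in Django 4.0. For a literal route like this one, path() would work equally well, e.g.:

```python
from django.urls import path
from dojo.components import views

# Equivalent to re_path(r"^components$", ...): the route uses no regex features.
urlpatterns = [
    path("components", views.components, name="components"),
]
```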
diff --git a/dojo/components/views.py b/dojo/components/views.py
index 7cc5787149b..2a8f226f83a 100644
--- a/dojo/components/views.py
+++ b/dojo/components/views.py
@@ -10,34 +10,61 @@
def components(request):
- add_breadcrumb(title='Components', top_level=True, request=request)
- separator = ', '
- # Get components ordered by component_name and concat component versions to the same row
+ add_breadcrumb(title="Components", top_level=True, request=request)
+ separator = ", "
+ # Get components ordered by component_name and concat component versions
+ # to the same row
component_query = get_authorized_findings(Permissions.Finding_View)
- if connection.vendor == 'postgresql':
- component_query = component_query.values("component_name").order_by('component_name').annotate(
- component_version=StringAgg('component_version', delimiter=separator, distinct=True))
+ if connection.vendor == "postgresql":
+ component_query = (
+ component_query.values("component_name")
+ .order_by("component_name")
+ .annotate(
+ component_version=StringAgg(
+ "component_version", delimiter=separator, distinct=True
+ )
+ )
+ )
else:
- component_query = component_query.values("component_name").order_by('component_name')
- component_query = component_query.annotate(component_version=Sql_GroupConcat(
- 'component_version', separator=separator, distinct=True))
+ component_query = component_query.values("component_name").order_by(
+ "component_name"
+ )
+ component_query = component_query.annotate(
+ component_version=Sql_GroupConcat(
+ "component_version", separator=separator, distinct=True
+ )
+ )
# Append counts
- component_query = component_query.annotate(total=Count('id')).order_by('component_name')
- component_query = component_query.annotate(active=Count('id', filter=Q(active=True)))
- component_query = component_query.annotate(duplicate=(Count('id', filter=Q(duplicate=True))))
- component_query = component_query.order_by('-total') # Default sort by total descending
+ component_query = component_query.annotate(total=Count("id")).order_by(
+ "component_name"
+ )
+ component_query = component_query.annotate(
+ active=Count("id", filter=Q(active=True))
+ )
+ component_query = component_query.annotate(
+ duplicate=(Count("id", filter=Q(duplicate=True)))
+ )
+ component_query = component_query.order_by(
+ "-total"
+ ) # Default sort by total descending
comp_filter = ComponentFilter(request.GET, queryset=component_query)
result = get_page_items(request, comp_filter.qs, 25)
# Filter out None values for auto-complete
- component_words = component_query.exclude(component_name__isnull=True).values_list('component_name', flat=True)
+ component_words = component_query.exclude(
+ component_name__isnull=True
+ ).values_list("component_name", flat=True)
- return render(request, 'dojo/components.html', {
- 'filter': comp_filter,
- 'result': result,
- 'component_words': sorted(set(component_words))
- })
+ return render(
+ request,
+ "dojo/components.html",
+ {
+ "filter": comp_filter,
+ "result": result,
+ "component_words": sorted(set(component_words)),
+ },
+ )
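
The stacked annotate() calls rely on Django's conditional aggregation: Count("id", filter=Q(active=True)) compiles to COUNT(...) FILTER (WHERE ...) on PostgreSQL and to a COUNT(CASE WHEN ...) fallback on MySQL. A condensed sketch against a hypothetical Component model (not part of this patch):

```python
from django.db.models import Count, Q

# total counts every row in the group; active/duplicate count only the rows
# matching their Q() condition, all computed in a single query.
stats = (
    Component.objects.values("component_name")  # hypothetical model
    .annotate(total=Count("id"))
    .annotate(active=Count("id", filter=Q(active=True)))
    .annotate(duplicate=Count("id", filter=Q(duplicate=True)))
    .order_by("-total")
)
```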
diff --git a/dojo/context_processors.py b/dojo/context_processors.py
index 96bf3121952..c0bbb250469 100644
--- a/dojo/context_processors.py
+++ b/dojo/context_processors.py
@@ -2,29 +2,56 @@
from django.conf import settings
-def globalize_oauth_vars(request):
+def globalize_vars(request):
# Return the values you want as a dictionary. You may add multiple values in there.
- return {'SHOW_LOGIN_FORM': settings.SHOW_LOGIN_FORM,
- 'FORGOT_PASSWORD': settings.FORGOT_PASSWORD,
- 'CLASSIC_AUTH_ENABLED': settings.CLASSIC_AUTH_ENABLED,
- 'AUTH0_ENABLED': settings.AUTH0_OAUTH2_ENABLED,
- 'GOOGLE_ENABLED': settings.GOOGLE_OAUTH_ENABLED,
- 'OKTA_ENABLED': settings.OKTA_OAUTH_ENABLED,
- 'GITLAB_ENABLED': settings.GITLAB_OAUTH2_ENABLED,
- 'AZUREAD_TENANT_OAUTH2_ENABLED': settings.AZUREAD_TENANT_OAUTH2_ENABLED,
- 'SAML2_ENABLED': settings.SAML2_ENABLED,
- 'SAML2_LOGIN_BUTTON_TEXT': settings.SAML2_LOGIN_BUTTON_TEXT,
- 'SAML2_LOGOUT_URL': settings.SAML2_LOGOUT_URL}
+ return {
+ "SHOW_LOGIN_FORM": settings.SHOW_LOGIN_FORM,
+ "FORGOT_PASSWORD": settings.FORGOT_PASSWORD,
+ "FORGOT_USERNAME": settings.FORGOT_USERNAME,
+ "CLASSIC_AUTH_ENABLED": settings.CLASSIC_AUTH_ENABLED,
+ "AUTH0_ENABLED": settings.AUTH0_OAUTH2_ENABLED,
+ "GOOGLE_ENABLED": settings.GOOGLE_OAUTH_ENABLED,
+ "OKTA_ENABLED": settings.OKTA_OAUTH_ENABLED,
+ "GITLAB_ENABLED": settings.GITLAB_OAUTH2_ENABLED,
+ "AZUREAD_TENANT_OAUTH2_ENABLED": settings.AZUREAD_TENANT_OAUTH2_ENABLED,
+ "AZUREAD_TENANT_OAUTH2_GET_GROUPS": settings.AZUREAD_TENANT_OAUTH2_GET_GROUPS,
+ "AZUREAD_TENANT_OAUTH2_GROUPS_FILTER": settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER,
+ "AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS": settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS,
+ "KEYCLOAK_ENABLED": settings.KEYCLOAK_OAUTH2_ENABLED,
+ "SOCIAL_AUTH_KEYCLOAK_LOGIN_BUTTON_TEXT": settings.SOCIAL_AUTH_KEYCLOAK_LOGIN_BUTTON_TEXT,
+ "GITHUB_ENTERPRISE_ENABLED": settings.GITHUB_ENTERPRISE_OAUTH2_ENABLED,
+ "SAML2_ENABLED": settings.SAML2_ENABLED,
+ "SAML2_LOGIN_BUTTON_TEXT": settings.SAML2_LOGIN_BUTTON_TEXT,
+ "SAML2_LOGOUT_URL": settings.SAML2_LOGOUT_URL,
+ "DOCUMENTATION_URL": settings.DOCUMENTATION_URL,
+ "API_TOKENS_ENABLED": settings.API_TOKENS_ENABLED,
+ }
def bind_system_settings(request):
from dojo.models import System_Settings
- return {'system_settings': System_Settings.objects.get()}
+
+ return {"system_settings": System_Settings.objects.get()}
def bind_alert_count(request):
if not settings.DISABLE_ALERT_COUNTER:
from dojo.models import Alerts
- if request.user.is_authenticated:
- return {'alert_count': Alerts.objects.filter(user_id=request.user).count()}
+
+ if hasattr(request, "user") and request.user.is_authenticated:
+ return {"alert_count": Alerts.objects.filter(user_id=request.user).count()}
return {}
+
+
+def bind_announcement(request):
+ from dojo.models import UserAnnouncement
+
+ try:
+ if request.user.is_authenticated:
+ user_announcement = UserAnnouncement.objects.select_related(
+ "announcement"
+ ).get(user=request.user)
+ return {"announcement": user_announcement.announcement}
+ return {}
+ except Exception:
+ return {}
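
Context processors only take effect once listed in the template settings; a sketch of the expected registration (the exact TEMPLATES layout in dojo/settings is assumed):

```python
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "OPTIONS": {
            "context_processors": [
                # ... Django defaults ...
                "dojo.context_processors.globalize_vars",  # renamed from globalize_oauth_vars
                "dojo.context_processors.bind_system_settings",
                "dojo.context_processors.bind_alert_count",
                "dojo.context_processors.bind_announcement",  # new
            ],
        },
    },
]
```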
diff --git a/dojo/cred/queries.py b/dojo/cred/queries.py
new file mode 100644
index 00000000000..09a37ec1944
--- /dev/null
+++ b/dojo/cred/queries.py
@@ -0,0 +1,51 @@
+from crum import get_current_user
+from django.db.models import Exists, OuterRef, Q
+from dojo.models import Cred_Mapping, Product_Member, Product_Type_Member, \
+ Product_Group, Product_Type_Group
+from dojo.authorization.authorization import get_roles_for_permission, user_has_global_permission
+
+
+def get_authorized_cred_mappings(permission, queryset=None):
+ user = get_current_user()
+
+ if user is None:
+ return Cred_Mapping.objects.none()
+
+ if queryset is None:
+ cred_mappings = Cred_Mapping.objects.all()
+ else:
+ cred_mappings = queryset
+
+ if user.is_superuser:
+ return cred_mappings
+
+ if user_has_global_permission(user, permission):
+ return cred_mappings
+
+ roles = get_roles_for_permission(permission)
+ authorized_product_type_roles = Product_Type_Member.objects.filter(
+ product_type=OuterRef('product__prod_type_id'),
+ user=user,
+ role__in=roles)
+ authorized_product_roles = Product_Member.objects.filter(
+ product=OuterRef('product_id'),
+ user=user,
+ role__in=roles)
+ authorized_product_type_groups = Product_Type_Group.objects.filter(
+ product_type=OuterRef('product__prod_type_id'),
+ group__users=user,
+ role__in=roles)
+ authorized_product_groups = Product_Group.objects.filter(
+ product=OuterRef('product_id'),
+ group__users=user,
+ role__in=roles)
+ cred_mappings = cred_mappings.annotate(
+ product__prod_type__member=Exists(authorized_product_type_roles),
+ product__member=Exists(authorized_product_roles),
+ product__prod_type__authorized_group=Exists(authorized_product_type_groups),
+ product__authorized_group=Exists(authorized_product_groups))
+ cred_mappings = cred_mappings.filter(
+ Q(product__prod_type__member=True) | Q(product__member=True) |
+ Q(product__prod_type__authorized_group=True) | Q(product__authorized_group=True))
+
+ return cred_mappings
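
Callers receive an ordinary queryset they can keep filtering; a usage sketch matching how view_cred_details below narrows its mappings:

```python
from dojo.authorization.roles_permissions import Permissions
from dojo.cred.queries import get_authorized_cred_mappings

# Start from all mappings the current user may view, then narrow further.
mappings = get_authorized_cred_mappings(Permissions.Product_View)
product_mappings = mappings.filter(product_id__isnull=False)
```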
diff --git a/dojo/cred/urls.py b/dojo/cred/urls.py
index 3d1d12f040b..3459418bd26 100644
--- a/dojo/cred/urls.py
+++ b/dojo/cred/urls.py
@@ -1,28 +1,27 @@
-from django.conf.urls import url
+from django.urls import re_path
from . import views
urlpatterns = [
- url(r'^cred/add', views.new_cred, name='add_cred'),
- url(r'^cred/(?P<ttid>\d+)/view$', views.view_cred_details, name='view_cred_details'),
- url(r'^cred/(?P<ttid>\d+)/edit$', views.edit_cred, name='edit_cred'),
- url(r'^cred/(?P<ttid>\d+)/delete$', views.delete_cred, name='delete_cred'),
- url(r'^cred/(?P<ttid>\d+)/selenium$', views.view_selenium, name='view_selenium'),
- url(r'^cred$', views.cred, name='cred'),
- url(r'^product/(?P<pid>\d+)/cred/add$', views.new_cred_product, name='new_cred_product'),
- url(r'^product/(?P<pid>\d+)/cred/all$', views.all_cred_product, name='all_cred_product'),
- url(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/edit$', views.edit_cred_product, name='edit_cred_product'),
- url(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_product, name='view_cred_product'),
- url(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_product, name='delete_cred_product'),
- url(r'^engagement/(?P<eid>\d+)/cred/add$', views.new_cred_product_engagement, name='new_cred_product_engagement'),
- url(r'^engagement/(?P<eid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_product_engagement,
+ re_path(r'^cred/add', views.new_cred, name='add_cred'),
+ re_path(r'^cred/(?P<ttid>\d+)/view$', views.view_cred_details, name='view_cred_details'),
+ re_path(r'^cred/(?P<ttid>\d+)/edit$', views.edit_cred, name='edit_cred'),
+ re_path(r'^cred/(?P<ttid>\d+)/delete$', views.delete_cred, name='delete_cred'),
+ re_path(r'^cred$', views.cred, name='cred'),
+ re_path(r'^product/(?P<pid>\d+)/cred/add$', views.new_cred_product, name='new_cred_product'),
+ re_path(r'^product/(?P<pid>\d+)/cred/all$', views.all_cred_product, name='all_cred_product'),
+ re_path(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/edit$', views.edit_cred_product, name='edit_cred_product'),
+ re_path(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_product, name='view_cred_product'),
+ re_path(r'^product/(?P<pid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_product, name='delete_cred_product'),
+ re_path(r'^engagement/(?P<eid>\d+)/cred/add$', views.new_cred_product_engagement, name='new_cred_product_engagement'),
+ re_path(r'^engagement/(?P<eid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_product_engagement,
name='view_cred_product_engagement'),
- url(r'^engagement/(?P<eid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_engagement,
+ re_path(r'^engagement/(?P<eid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_engagement,
name='delete_cred_engagement'),
- url(r'^test/(?P<tid>\d+)/cred/add$', views.new_cred_engagement_test, name='new_cred_engagement_test'),
- url(r'^test/(?P<tid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_engagement_test,
+ re_path(r'^test/(?P<tid>\d+)/cred/add$', views.new_cred_engagement_test, name='new_cred_engagement_test'),
+ re_path(r'^test/(?P<tid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_engagement_test,
name='view_cred_engagement_test'),
- url(r'^test/(?P<tid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_test, name='delete_cred_test'),
- url(r'^finding/(?P<fid>\d+)/cred/add$', views.new_cred_finding, name='new_cred_finding'),
- url(r'^finding/(?P<fid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_finding, name='view_cred_finding'),
- url(r'^finding/(?P<fid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_finding, name='delete_cred_finding'),
+ re_path(r'^test/(?P<tid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_test, name='delete_cred_test'),
+ re_path(r'^finding/(?P<fid>\d+)/cred/add$', views.new_cred_finding, name='new_cred_finding'),
+ re_path(r'^finding/(?P<fid>\d+)/cred/(?P<ttid>\d+)/view$', views.view_cred_finding, name='view_cred_finding'),
+ re_path(r'^finding/(?P<fid>\d+)/cred/(?P<ttid>\d+)/delete$', views.delete_cred_finding, name='delete_cred_finding'),
]
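
A reminder of how these routes bind to the views below: each (?P<name>...) capture is passed to the view as a keyword argument of the same name, which is also how the authorization decorators locate their object id. A minimal illustration:

```python
from django.http import HttpResponse
from django.urls import re_path

def view_cred_details(request, ttid):  # parameter name must match the group name
    return HttpResponse(f"credential {ttid}")

urlpatterns = [
    re_path(r"^cred/(?P<ttid>\d+)/view$", view_cred_details, name="view_cred_details"),
]
```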
diff --git a/dojo/cred/views.py b/dojo/cred/views.py
index 9c16ee00c9a..53d3315be18 100644
--- a/dojo/cred/views.py
+++ b/dojo/cred/views.py
@@ -1,24 +1,24 @@
import logging
-import os
from django.contrib import messages
-from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
-from django.http import HttpResponseRedirect, StreamingHttpResponse
+from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from dojo.models import Finding, Product, Engagement, Cred_User, Cred_Mapping, Test
from dojo.utils import add_breadcrumb, Product_Tab
from dojo.forms import CredUserForm, NoteForm, CredMappingFormProd, CredMappingForm
-from dojo.utils import dojo_crypto_encrypt, prepare_for_view, FileIterWrapper
+from dojo.utils import dojo_crypto_encrypt, prepare_for_view
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.authorization.roles_permissions import Permissions
+from dojo.authorization.authorization_decorators import user_is_configuration_authorized
+from dojo.cred.queries import get_authorized_cred_mappings
logger = logging.getLogger(__name__)
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_configuration_authorized(Permissions.Credential_Add)
def new_cred(request):
if request.method == 'POST':
tform = CredUserForm(request.POST)
@@ -45,11 +45,11 @@ def all_cred_product(request, pid):
prod = get_object_or_404(Product, id=pid)
creds = Cred_Mapping.objects.filter(product=prod).order_by('cred_id__name')
- product_tab = Product_Tab(prod.id, title="Credentials", tab="settings")
+ product_tab = Product_Tab(prod, title="Credentials", tab="settings")
return render(request, 'dojo/view_cred_prod.html', {'product_tab': product_tab, 'creds': creds, 'prod': prod})
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Cred_User, Permissions.Credential_Edit, 'ttid')
def edit_cred(request, ttid):
tool_config = Cred_User.objects.get(pk=ttid)
if request.method == 'POST':
@@ -81,12 +81,13 @@ def edit_cred(request, ttid):
})
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Cred_User, Permissions.Credential_View, 'ttid')
def view_cred_details(request, ttid):
cred = Cred_User.objects.get(pk=ttid)
notes = cred.notes.all()
cred_products = Cred_Mapping.objects.select_related('product').filter(
product_id__isnull=False, cred_id=ttid).order_by('product__name')
+ cred_products = get_authorized_cred_mappings(Permissions.Product_View, cred_products)
if request.method == 'POST':
form = NoteForm(request.POST)
@@ -117,7 +118,7 @@ def view_cred_details(request, ttid):
})
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_configuration_authorized(Permissions.Credential_View)
def cred(request):
confs = Cred_User.objects.all().order_by('name', 'environment', 'username')
add_breadcrumb(title="Credential Manager", top_level=True, request=request)
@@ -126,8 +127,8 @@ def cred(request):
})
-# The dialogue shows the credentials and there can only be viewed by superusers
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Product, Permissions.Product_View, 'pid')
+@user_is_authorized(Cred_User, Permissions.Credential_View, 'ttid')
def view_cred_product(request, pid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -182,8 +183,8 @@ def view_cred_product(request, pid, ttid):
})
-# The dialogue shows the credentials and there can only be viewed by superusers
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Product, Permissions.Engagement_View, 'eid')
+@user_is_authorized(Cred_User, Permissions.Credential_View, 'ttid')
def view_cred_product_engagement(request, eid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -213,11 +214,6 @@ def view_cred_product_engagement(request, eid, ttid):
title="Credential Manager", top_level=False, request=request)
cred_type = "Engagement"
edit_link = ""
- view_link = reverse(
- 'view_cred_product_engagement', args=(
- eid,
- cred.id,
- ))
delete_link = reverse(
'delete_cred_engagement', args=(
eid,
@@ -236,8 +232,8 @@ def view_cred_product_engagement(request, eid, ttid):
})
-# The dialogue shows the credentials and there can only be viewed by superusers
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Product, Permissions.Test_View, 'tid')
+@user_is_authorized(Cred_User, Permissions.Credential_View, 'ttid')
def view_cred_engagement_test(request, tid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -269,11 +265,6 @@ def view_cred_engagement_test(request, tid, ttid):
title="Credential Manager", top_level=False, request=request)
cred_type = "Test"
edit_link = None
- view_link = reverse(
- 'view_cred_engagement_test', args=(
- tid,
- cred.id,
- ))
delete_link = reverse(
'delete_cred_test', args=(
tid,
@@ -292,8 +283,8 @@ def view_cred_engagement_test(request, tid, ttid):
})
-# The dialogue shows the credentials and there can only be viewed by superusers
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Product, Permissions.Finding_View, 'fid')
+@user_is_authorized(Cred_User, Permissions.Credential_View, 'ttid')
def view_cred_finding(request, fid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -325,11 +316,6 @@ def view_cred_finding(request, fid, ttid):
title="Credential Manager", top_level=False, request=request)
cred_type = "Finding"
edit_link = None
- view_link = reverse(
- 'view_cred_finding', args=(
- fid,
- cred.id,
- ))
delete_link = reverse(
'delete_cred_finding', args=(
fid,
@@ -349,6 +335,7 @@ def view_cred_finding(request, fid, ttid):
@user_is_authorized(Product, Permissions.Product_Edit, 'pid')
+@user_is_authorized(Cred_User, Permissions.Credential_Edit, 'ttid')
def edit_cred_product(request, pid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -367,7 +354,7 @@ def edit_cred_product(request, pid, ttid):
else:
tform = CredMappingFormProd(instance=cred)
- product_tab = Product_Tab(prod.id, title="Edit Product Credential", tab="settings")
+ product_tab = Product_Tab(prod, title="Edit Product Credential", tab="settings")
return render(request, 'dojo/edit_cred_all.html', {
'tform': tform,
'product_tab': product_tab,
@@ -376,6 +363,7 @@ def edit_cred_product(request, pid, ttid):
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
+@user_is_authorized(Cred_User, Permissions.Credential_Edit, 'ttid')
def edit_cred_product_engagement(request, eid, ttid):
cred = get_object_or_404(
Cred_Mapping.objects.select_related('cred_id'), id=ttid)
@@ -434,7 +422,7 @@ def new_cred_product(request, pid):
else:
tform = CredMappingFormProd()
- product_tab = Product_Tab(pid, title="Add Credential Configuration", tab="settings")
+ product_tab = Product_Tab(prod, title="Add Credential Configuration", tab="settings")
return render(request, 'dojo/new_cred_product.html', {
'tform': tform,
@@ -595,6 +583,7 @@ def new_cred_finding(request, fid):
})
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred_controller(request, destination_url, id, ttid):
cred = None
try:
@@ -661,63 +650,49 @@ def delete_cred_controller(request, destination_url, id, ttid):
add_breadcrumb(title="Delete Credential", top_level=False, request=request)
product_tab = None
if id:
- pid = None
+ product = None
if destination_url == "all_cred_product":
- pid = id
+ product = get_object_or_404(Product, id=id)
elif destination_url == "view_engagement":
engagement = get_object_or_404(Engagement, id=id)
- pid = engagement.product.id
+ product = engagement.product
elif destination_url == "view_test":
test = get_object_or_404(Test, id=id)
- pid = test.engagement.product.id
+ product = test.engagement.product
elif destination_url == "view_finding":
finding = get_object_or_404(Finding, id=id)
- pid = finding.test.engagement.product.id
- product_tab = Product_Tab(pid, title="Delete Credential Mapping", tab="settings")
+ product = finding.test.engagement.product
+ product_tab = Product_Tab(product, title="Delete Credential Mapping", tab="settings")
return render(request, 'dojo/delete_cred_all.html', {
'tform': tform,
'product_tab': product_tab
})
-@user_passes_test(lambda u: u.is_superuser)
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred(request, ttid):
return delete_cred_controller(request, "cred", 0, ttid)
@user_is_authorized(Product, Permissions.Product_Edit, 'pid')
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred_product(request, pid, ttid):
return delete_cred_controller(request, "all_cred_product", pid, ttid)
@user_is_authorized(Engagement, Permissions.Engagement_Edit, 'eid')
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred_engagement(request, eid, ttid):
return delete_cred_controller(request, "view_engagement", eid, ttid)
@user_is_authorized(Test, Permissions.Test_Edit, 'tid')
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred_test(request, tid, ttid):
return delete_cred_controller(request, "view_test", tid, ttid)
@user_is_authorized(Finding, Permissions.Finding_Edit, 'fid')
+@user_is_authorized(Cred_User, Permissions.Credential_Delete, 'ttid')
def delete_cred_finding(request, fid, ttid):
return delete_cred_controller(request, "view_finding", fid, ttid)
-
-
-@user_passes_test(lambda u: u.is_superuser)
-def view_selenium(request, ttid):
- import mimetypes
-
- mimetypes.init()
- cred = Cred_Mapping.objects.get(pk=ttid)
- # print(cred.cred_id.selenium_script)
- # mimetype, encoding = mimetypes.guess_type(cred.cred_id.selenium_script)
- response = StreamingHttpResponse(
- FileIterWrapper(open(cred.cred_id.selenium_script)))
- fileName, fileExtension = os.path.splitext(cred.cred_id.selenium_script)
- response[
- 'Content-Disposition'] = 'attachment; filename=selenium_script' + fileExtension
- response['Content-Type'] = mimetypes
-
- return response
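
The recurring pattern in this file: every blanket user_passes_test(lambda u: u.is_superuser) gate is replaced by stacked object-level checks, and both decorators must pass, each resolving its object from the named URL kwarg. Condensed from the hunks above:

```python
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.authorization.roles_permissions import Permissions
from dojo.models import Cred_User, Product

@user_is_authorized(Product, Permissions.Product_Edit, "pid")           # product check
@user_is_authorized(Cred_User, Permissions.Credential_Delete, "ttid")   # credential check
def delete_cred_product(request, pid, ttid):
    ...
```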
diff --git a/dojo/db_migrations/0064_jira_refactor_populate.py b/dojo/db_migrations/0064_jira_refactor_populate.py
index 4ea317f87c8..c0e9078e5d9 100644
--- a/dojo/db_migrations/0064_jira_refactor_populate.py
+++ b/dojo/db_migrations/0064_jira_refactor_populate.py
@@ -78,7 +78,7 @@ def populate_jira_instance_name_if_empty(apps, schema_editor):
logger.info('done with data migration, now removing some fields which may take a while depending on the amount of findings')
def show_info(apps, schema_editor):
- logger.info('this migration should have run succesfully. if not, there is a Django Management command to manually run the data conversion')
+ logger.info('this migration should have run successfully. if not, there is a Django Management command to manually run the data conversion')
logger.info('for docker-compose execute: docker-compose exec uwsgi ./manage.py jira_refactor_data_migration')
operations = [
diff --git a/dojo/db_migrations/0066_django_tagulous.py b/dojo/db_migrations/0066_django_tagulous.py
index 6e18fa8ec75..7727e8179b1 100644
--- a/dojo/db_migrations/0066_django_tagulous.py
+++ b/dojo/db_migrations/0066_django_tagulous.py
@@ -42,7 +42,7 @@ def copy_existing_tags_to_tags_from_django_tagging_field(apps, schema_editor):
prod_type_lost_and_found, created = Product_Type_Model.objects.get_or_create(name='_tag migration lost and found')
obj.prod_type = prod_type_lost_and_found
obj.save()
- logger.warning('product type succesfully changed to %i', prod_type_lost_and_found.id)
+ logger.warning('product type successfully changed to %i', prod_type_lost_and_found.id)
obj.save()
except Exception as e:
diff --git a/dojo/db_migrations/0118_remove_finding_images.py b/dojo/db_migrations/0118_remove_finding_images.py
index bffc069abda..7de9df25193 100644
--- a/dojo/db_migrations/0118_remove_finding_images.py
+++ b/dojo/db_migrations/0118_remove_finding_images.py
@@ -37,7 +37,7 @@ def move_images_to_files(apps, schema_editor):
if not passed:
finding.files.add(file)
else:
- logger.warn('unable to migrate image %s with caption %s', image.image.name, image.caption)
+ logger.warning('unable to migrate image %s with caption %s', image.image.name, image.caption)
class Migration(migrations.Migration):
diff --git a/dojo/db_migrations/0145_system_settings_default_group_email_pattern.py b/dojo/db_migrations/0145_system_settings_default_group_email_pattern.py
new file mode 100644
index 00000000000..a2bd8da882d
--- /dev/null
+++ b/dojo/db_migrations/0145_system_settings_default_group_email_pattern.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.11 on 2022-01-10 19:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0144_import_action_untouched'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='default_group_email_pattern',
+ field=models.CharField(blank=True, default='', help_text='New users will only be assigned to the default group when their email address matches this regex pattern. This is an optional condition.', max_length=200),
+ ),
+ ]
diff --git a/dojo/db_migrations/0146_lead_optional.py b/dojo/db_migrations/0146_lead_optional.py
new file mode 100644
index 00000000000..a5ac62afd55
--- /dev/null
+++ b/dojo/db_migrations/0146_lead_optional.py
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.11 on 2022-01-16 17:56
+
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("dojo", "0145_system_settings_default_group_email_pattern"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="engagement",
+ name="lead",
+ field=models.ForeignKey(editable=True, null=True, blank=True, on_delete=models.RESTRICT, to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name="test",
+ name="lead",
+ field=models.ForeignKey(editable=True, null=True, blank=True, on_delete=models.RESTRICT, to=settings.AUTH_USER_MODEL),
+ ),
+ ]
diff --git a/dojo/db_migrations/0147_rename_sslyze_parser.py b/dojo/db_migrations/0147_rename_sslyze_parser.py
new file mode 100644
index 00000000000..1c357d37122
--- /dev/null
+++ b/dojo/db_migrations/0147_rename_sslyze_parser.py
@@ -0,0 +1,23 @@
+from django.db import migrations
+
+
+def rename_sslyze_parser(apps, schema_editor):
+ Test_Type_model = apps.get_model('dojo', 'Test_Type')
+ try:
+ test_type_sslyze = Test_Type_model.objects.get(name='SSLyze 3 Scan (JSON)')
+ test_type_sslyze.name = 'SSLyze Scan (JSON)'
+ test_type_sslyze.save()
+ except Test_Type_model.DoesNotExist:
+ # This happens when a new instance of DD is initialized
+ pass
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0146_lead_optional'),
+ ]
+
+ operations = [
+ migrations.RunPython(rename_sslyze_parser),
+ ]
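
One caveat with these RunPython data migrations: without a reverse function they make the migration irreversible. If rollback matters, the usual escape hatch is a no-op reverse, e.g.:

```python
from django.db import migrations

# Lets `manage.py migrate dojo <previous>` step back over this migration
# without attempting to undo the rename.
operations = [
    migrations.RunPython(rename_sslyze_parser, migrations.RunPython.noop),
]
```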
diff --git a/dojo/db_migrations/0148_default_notifications.py b/dojo/db_migrations/0148_default_notifications.py
new file mode 100644
index 00000000000..89fdbfb9443
--- /dev/null
+++ b/dojo/db_migrations/0148_default_notifications.py
@@ -0,0 +1,89 @@
+# Generated by Django 3.2.11 on 2022-02-06 20:23
+
+from django.db import migrations
+import multiselectfield.db.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0147_rename_sslyze_parser'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='notifications',
+ name='auto_close_engagement',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='close_engagement',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='code_review',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='engagement_added',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='jira_update',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), help_text='JIRA sync happens in the background, errors will be shown as notifications/alerts so make sure to subscribe', max_length=24, verbose_name='JIRA problems'),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='other',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='product_added',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='product_type_added',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='review_requested',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='scan_added',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Triggered whenever a (re-)import has been done that created/updated/closed findings.', max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='sla_breach',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches', max_length=24, verbose_name='SLA breach'),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='stale_engagement',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='test_added',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='upcoming_engagement',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ migrations.AlterField(
+ model_name='notifications',
+ name='user_mentioned',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), max_length=24),
+ ),
+ ]
diff --git a/dojo/db_migrations/0149_harmonize_user_format.py b/dojo/db_migrations/0149_harmonize_user_format.py
new file mode 100644
index 00000000000..c38a1855e29
--- /dev/null
+++ b/dojo/db_migrations/0149_harmonize_user_format.py
@@ -0,0 +1,109 @@
+# Generated by Django 3.2.11 on 2022-02-22 08:24
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0148_default_notifications'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='alerts',
+ name='user_id',
+ field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='answered_survey',
+ name='assignee',
+ field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='assignee', to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='answered_survey',
+ name='responder',
+ field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='responder', to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='app_analysis',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='endpoint_status',
+ name='mitigated_by',
+ field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='engagement',
+ name='lead',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='fileaccesstoken',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='finding',
+ name='last_reviewed_by',
+ field=models.ForeignKey(editable=False, help_text='Provides the person who last reviewed the flaw.', null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='last_reviewed_by', to='dojo.dojo_user', verbose_name='Last Reviewed By'),
+ ),
+ migrations.AlterField(
+ model_name='finding',
+ name='mitigated_by',
+ field=models.ForeignKey(editable=False, help_text='Documents who has marked this flaw as fixed.', null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='mitigated_by', to='dojo.dojo_user', verbose_name='Mitigated By'),
+ ),
+ migrations.AlterField(
+ model_name='finding',
+ name='reporter',
+ field=models.ForeignKey(default=1, editable=False, help_text='Documents who reported the flaw.', on_delete=django.db.models.deletion.RESTRICT, related_name='reporter', to='dojo.dojo_user', verbose_name='Reporter'),
+ ),
+ migrations.AlterField(
+ model_name='finding',
+ name='reviewers',
+ field=models.ManyToManyField(blank=True, help_text='Documents who reviewed the flaw.', to='dojo.Dojo_User', verbose_name='Reviewers'),
+ ),
+ migrations.AlterField(
+ model_name='global_role',
+ name='user',
+ field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='languages',
+ name='user',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='notehistory',
+ name='current_editor',
+ field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='notes',
+ name='author',
+ field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='editor_notes_set', to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='notes',
+ name='editor',
+ field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='author_notes_set', to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='stub_finding',
+ name='reporter',
+ field=models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='test',
+ name='lead',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.RESTRICT, to='dojo.dojo_user'),
+ ),
+ migrations.AlterField(
+ model_name='usercontactinfo',
+ name='user',
+ field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0150_dedupe_endpoint_status.py b/dojo/db_migrations/0150_dedupe_endpoint_status.py
new file mode 100644
index 00000000000..3947ce5d123
--- /dev/null
+++ b/dojo/db_migrations/0150_dedupe_endpoint_status.py
@@ -0,0 +1,54 @@
+from django.db import migrations
+from django.db.models import Count, Q
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0149_harmonize_user_format'),
+ ]
+
+ def dedupe_endpoint_status(apps, schema_editor):
+ Endpoint_Status = apps.get_model('dojo', 'endpoint_status')
+ Endpoint = apps.get_model('dojo', 'endpoint')
+ Finding = apps.get_model('dojo', 'finding')
+
+ to_process = Endpoint_Status.objects.exclude(Q(endpoint=None) | Q(finding=None))\
+ .values('finding', 'endpoint').annotate(cnt=Count('id')).filter(cnt__gt=1)
+ if to_process.count() == 0:
+ logger.info('There is nothing to process')
+ else:
+ logger.warning('We identified %s group(s) of endpoint status records that need to be deduplicated',
+ to_process.count())
+
+ for eps_group in to_process:
+
+ finding = Finding.objects.get(id=eps_group.get('finding'))
+ ep = Endpoint.objects.get(id=eps_group.get('endpoint'))
+ epss = Endpoint_Status.objects.filter(finding=finding, endpoint=ep)
+
+ # we need to identify when the first one was created
+ first_date = epss.order_by('date').first().date
+
+ # next we need to know which one stores the most recent information
+ last_id = epss.order_by('last_modified').last().id
+
+ logger.debug('Redundant endpoint statuses on finding: "%s" & endpoint "%s" will be removed. We are '
+ 'keeping only id: "%s" and we are setting date of the first identification: %s',
+ str(finding), str(ep), last_id, first_date)
+
+ # Remove all except the most recent one
+ Endpoint_Status.objects.filter(finding=eps_group.get('finding'),
+ endpoint=eps_group.get('endpoint')).exclude(id=last_id).delete()
+
+ # Use the date from the oldest one
+ eps = Endpoint_Status.objects.get(id=last_id)
+ eps.date = first_date
+ eps.save()
+
+ operations = [
+ migrations.RunPython(dedupe_endpoint_status)
+ ]
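
The detection idiom in isolation: group rows by the (finding, endpoint) pair and keep only the groups with more than one member (Endpoint_Status here is the historical model obtained via apps.get_model):

```python
from django.db.models import Count, Q

duplicates = (
    Endpoint_Status.objects.exclude(Q(endpoint=None) | Q(finding=None))
    .values("finding", "endpoint")  # group key
    .annotate(cnt=Count("id"))      # group size
    .filter(cnt__gt=1)              # duplicates only
)
# Each element is a dict such as {"finding": 7, "endpoint": 3, "cnt": 2}.
```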
diff --git a/dojo/db_migrations/0151_index_endpoint_status.py b/dojo/db_migrations/0151_index_endpoint_status.py
new file mode 100644
index 00000000000..da5dbd21937
--- /dev/null
+++ b/dojo/db_migrations/0151_index_endpoint_status.py
@@ -0,0 +1,17 @@
+# Generated by Django 3.2.12 on 2022-02-22 16:00
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0150_dedupe_endpoint_status'),
+ ]
+
+ operations = [
+ migrations.AddConstraint(
+ model_name='endpoint_status',
+ constraint=models.UniqueConstraint(fields=('finding', 'endpoint'), name='endpoint-finding relation'),
+ ),
+ ]
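
With the constraint in place, the cleanup in 0150 cannot regress: inserting a second status for the same pair now fails at the database level. A hypothetical illustration (the finding and endpoint objects, and an existing status row, are assumed):

```python
from django.db import IntegrityError

from dojo.models import Endpoint_Status

try:
    # A second row for the same (finding, endpoint) pair violates the
    # 'endpoint-finding relation' constraint added above.
    Endpoint_Status.objects.create(finding=finding, endpoint=endpoint)
except IntegrityError:
    pass  # already recorded
```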
diff --git a/dojo/db_migrations/0152_notifications_template.py b/dojo/db_migrations/0152_notifications_template.py
new file mode 100644
index 00000000000..623f92caf26
--- /dev/null
+++ b/dojo/db_migrations/0152_notifications_template.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.11 on 2022-01-25 17:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0151_index_endpoint_status'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notifications',
+ name='template',
+ field=models.BooleanField(default=False),
+ ),
+ ]
diff --git a/dojo/db_migrations/0153_migrate_endpoint_mitigated.py b/dojo/db_migrations/0153_migrate_endpoint_mitigated.py
new file mode 100644
index 00000000000..143785d9c10
--- /dev/null
+++ b/dojo/db_migrations/0153_migrate_endpoint_mitigated.py
@@ -0,0 +1,41 @@
+from django.db import migrations
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0152_notifications_template'),
+ ]
+
+ def migrate_endpoint_mitigated(apps, schema_editor):
+ Endpoint = apps.get_model('dojo', 'Endpoint')
+ Endpoint_Status = apps.get_model('dojo', 'Endpoint_Status')
+
+ all_ep = Endpoint.objects.filter(mitigated=True)
+
+ if all_ep.count() == 0:
+ logger.info('There is nothing to process')
+ else:
+ logger.warning('We identified %s endpoints marked as Mitigated and their status will be updated',
+ all_ep.count())
+
+ for ep in all_ep:
+ epss = Endpoint_Status.objects.select_related('finding').filter(endpoint=ep, mitigated=False)
+ for eps in epss:
+ eps.date = eps.finding.date
+ eps.mitigated = True
+ eps.mitigated_by = eps.finding.reporter
+ eps.save()
+ logger.debug('Status for finding "%s" on endpoint "%s" marked as mitigated at "%s" by "%s"',
+ str(eps.finding),
+ str(ep),
+ eps.date,
+ eps.mitigated_by
+ )
+
+ operations = [
+ migrations.RunPython(migrate_endpoint_mitigated)
+ ]
diff --git a/dojo/db_migrations/0154_remove_endpoint_mitigated.py b/dojo/db_migrations/0154_remove_endpoint_mitigated.py
new file mode 100644
index 00000000000..b505e9d0e92
--- /dev/null
+++ b/dojo/db_migrations/0154_remove_endpoint_mitigated.py
@@ -0,0 +1,25 @@
+# Generated by Django 3.2.12 on 2022-02-23 15:36
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0153_migrate_endpoint_mitigated'),
+ ]
+
+ operations = [
+ migrations.AddIndex(
+ model_name='endpoint',
+ index=models.Index(fields=['product'], name='dojo_endpoi_product_d53fb3_idx'),
+ ),
+ migrations.RemoveIndex(
+ model_name='endpoint',
+ name='dojo_endpoi_product_b80e9a_idx',
+ ),
+ migrations.RemoveField(
+ model_name='endpoint',
+ name='mitigated',
+ ),
+ ]
diff --git a/dojo/db_migrations/0155_enable_finding_groups.py b/dojo/db_migrations/0155_enable_finding_groups.py
new file mode 100755
index 00000000000..8018c0c457c
--- /dev/null
+++ b/dojo/db_migrations/0155_enable_finding_groups.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.12 on 2022-03-23 07:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0154_remove_endpoint_mitigated'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_finding_groups',
+ field=models.BooleanField(default=True, help_text='With this setting turned off, the Finding Groups will be disabled.', verbose_name='Enable Finding Groups'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0156_migrate_finding_groups_setting.py b/dojo/db_migrations/0156_migrate_finding_groups_setting.py
new file mode 100755
index 00000000000..29497f11336
--- /dev/null
+++ b/dojo/db_migrations/0156_migrate_finding_groups_setting.py
@@ -0,0 +1,32 @@
+# Generated by Django 3.2.12 on 2022-03-23 07:59
+
+from django.db import migrations
+from django.conf import settings
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def migrate_from_settings_file(apps, schema_editor):
+
+ if hasattr(settings, 'FEATURE_FINDING_GROUPS'):
+ system_settings_model = apps.get_model('dojo', 'System_Settings')
+ logger.info('Migrating value from FEATURE_FINDING_GROUPS into system settings model')
+ try:
+ system_setting = system_settings_model.objects.get()
+ system_setting.enable_finding_groups = settings.FEATURE_FINDING_GROUPS
+ system_setting.save()
+ except system_settings_model.DoesNotExist:
+ # for a clean installation there is no system_settings record, so just ignore it
+ pass
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0155_enable_finding_groups'),
+ ]
+
+ operations = [
+ migrations.RunPython(migrate_from_settings_file),
+ ]
diff --git a/dojo/db_migrations/0157_vulnerability_reference.py b/dojo/db_migrations/0157_vulnerability_reference.py
new file mode 100644
index 00000000000..91f8d8fb49a
--- /dev/null
+++ b/dojo/db_migrations/0157_vulnerability_reference.py
@@ -0,0 +1,40 @@
+# Generated by Django 3.2.12 on 2022-02-06 14:43
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0156_migrate_finding_groups_setting'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='finding',
+ name='cve',
+ field=models.CharField(help_text='A reference to a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Reference'),
+ ),
+ migrations.AlterField(
+ model_name='finding_template',
+ name='cve',
+ field=models.CharField(help_text='A reference to a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Reference'),
+ ),
+ migrations.CreateModel(
+ name='Vulnerability_Reference',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability_reference', models.TextField(max_length=50)),
+ ('finding', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Vulnerability_Reference_Template',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability_reference', models.TextField(max_length=50)),
+ ('finding_template', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding_template')),
+ ],
+ ),
+ ]
diff --git a/dojo/db_migrations/0158_vulnerability_id.py b/dojo/db_migrations/0158_vulnerability_id.py
new file mode 100644
index 00000000000..7406be19cba
--- /dev/null
+++ b/dojo/db_migrations/0158_vulnerability_id.py
@@ -0,0 +1,41 @@
+# Generated by Django 3.2.13 on 2022-04-26 16:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0157_vulnerability_reference'),
+ ]
+
+ operations = [
+ migrations.RenameModel(
+ old_name='Vulnerability_Reference',
+ new_name='Vulnerability_Id',
+ ),
+ migrations.RenameModel(
+ old_name='Vulnerability_Reference_Template',
+ new_name='Vulnerability_Id_Template',
+ ),
+ migrations.RenameField(
+ model_name='vulnerability_id',
+ old_name='vulnerability_reference',
+ new_name='vulnerability_id',
+ ),
+ migrations.RenameField(
+ model_name='vulnerability_id_template',
+ old_name='vulnerability_reference',
+ new_name='vulnerability_id',
+ ),
+ migrations.AlterField(
+ model_name='finding',
+ name='cve',
+ field=models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id'),
+ ),
+ migrations.AlterField(
+ model_name='finding_template',
+ name='cve',
+ field=models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0159_remove_broken_endpoint_statuses.py b/dojo/db_migrations/0159_remove_broken_endpoint_statuses.py
new file mode 100644
index 00000000000..36d14def3c6
--- /dev/null
+++ b/dojo/db_migrations/0159_remove_broken_endpoint_statuses.py
@@ -0,0 +1,16 @@
+from django.db import migrations
+from dojo.endpoint.utils import remove_broken_endpoint_statuses
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0158_vulnerability_id'),
+ ]
+
+ def remove_broken_endpoint_statuses_local(apps, schema_editor):
+ remove_broken_endpoint_statuses(apps=apps)
+
+ operations = [
+ migrations.RunPython(remove_broken_endpoint_statuses_local)
+ ]
diff --git a/dojo/db_migrations/0160_set_notnull_endpoint_statuses.py b/dojo/db_migrations/0160_set_notnull_endpoint_statuses.py
new file mode 100644
index 00000000000..de676c3e9c9
--- /dev/null
+++ b/dojo/db_migrations/0160_set_notnull_endpoint_statuses.py
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.12 on 2022-04-06 21:47
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0159_remove_broken_endpoint_statuses'),
+ ]
+
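+    # Both foreign keys become NOT NULL (null=True is dropped), so every
+    # Endpoint_Status row must reference an endpoint and a finding.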
+ operations = [
+ migrations.AlterField(
+ model_name='endpoint_status',
+ name='endpoint',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='status_endpoint', to='dojo.endpoint'),
+ ),
+ migrations.AlterField(
+ model_name='endpoint_status',
+ name='finding',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='status_finding', to='dojo.finding'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0161_alter_dojo_group_social_provider.py b/dojo/db_migrations/0161_alter_dojo_group_social_provider.py
new file mode 100644
index 00000000000..524c0d901b6
--- /dev/null
+++ b/dojo/db_migrations/0161_alter_dojo_group_social_provider.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.12 on 2022-04-05 14:35
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0160_set_notnull_endpoint_statuses'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='dojo_group',
+ name='social_provider',
+ field=models.CharField(blank=True, choices=[('AzureAD', 'AzureAD')], help_text='Group imported from a social provider.', max_length=10, null=True, verbose_name='Social Authentication Provider'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0162_created_and_updated.py b/dojo/db_migrations/0162_created_and_updated.py
new file mode 100644
index 00000000000..282a69d4e96
--- /dev/null
+++ b/dojo/db_migrations/0162_created_and_updated.py
@@ -0,0 +1,128 @@
+# Generated by Django 3.2.13 on 2022-06-03 20:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0161_alter_dojo_group_social_provider'),
+ ]
+
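+    # auto_now_add stamps 'created' once at insert time; auto_now refreshes
+    # 'updated' on every save().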
+ operations = [
+ migrations.AlterField(
+ model_name='alerts',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='app_analysis',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_category',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_category',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_product',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_product',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_product_summary',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_product_summary',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_requirement',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_requirement',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_type',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='benchmark_type',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='contact',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='languages',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='objects_product',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='objects_review',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='product',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='product',
+ name='updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='sonarqube_issue_transition',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='testing_guide',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='testing_guide',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ migrations.AlterField(
+ model_name='testing_guide_category',
+ name='created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AlterField(
+ model_name='testing_guide_category',
+ name='updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ ]
diff --git a/dojo/db_migrations/0163_system_settings_enable_calendar.py b/dojo/db_migrations/0163_system_settings_enable_calendar.py
new file mode 100644
index 00000000000..4355259bbeb
--- /dev/null
+++ b/dojo/db_migrations/0163_system_settings_enable_calendar.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.13 on 2022-06-18 16:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0162_created_and_updated'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_calendar',
+ field=models.BooleanField(default=True, help_text='With this setting turned off, the Calendar will be disabled in the user interface.', verbose_name='Enable Calendar'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0164_remove_system_settings_staff_user_email_pattern.py b/dojo/db_migrations/0164_remove_system_settings_staff_user_email_pattern.py
new file mode 100644
index 00000000000..76311977f41
--- /dev/null
+++ b/dojo/db_migrations/0164_remove_system_settings_staff_user_email_pattern.py
@@ -0,0 +1,17 @@
+# Generated by Django 3.2.13 on 2022-06-22 04:41
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0163_system_settings_enable_calendar'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='staff_user_email_pattern',
+ ),
+ ]
diff --git a/dojo/db_migrations/0165_custom_sla.py b/dojo/db_migrations/0165_custom_sla.py
new file mode 100644
index 00000000000..1bc49da871b
--- /dev/null
+++ b/dojo/db_migrations/0165_custom_sla.py
@@ -0,0 +1,59 @@
+# Generated by Django 3.2.13 on 2022-05-28 20:06
+import logging
+
+from django.db import migrations, models
+
+logger = logging.getLogger(__name__)
+
+
+# def save_existing_sla(apps, schema_editor):
+# system_settings_model = apps.get_model('dojo', 'System_Settings')
+#
+# try:
+# system_settings = system_settings_model.objects.get()
+# critical = system_settings.sla_critical,
+# high = system_settings.sla_high,
+# medium = system_settings.sla_medium,
+# low = system_settings.sla_low
+# except:
+# critical = 7
+# high = 30
+# medium = 90
+# low = 120
+#
+# SLA_Configuration = apps.get_model('dojo', 'SLA_Configuration')
+# SLA_Configuration.objects.create(name='Default',
+# description='The Default SLA Configuration. Products not using an explicit SLA Configuration will use this one.',
+# critical=critical,
+# high=high,
+# medium=medium,
+# low=low)
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ('dojo', '0164_remove_system_settings_staff_user_email_pattern'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='SLA_Configuration',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(help_text='A unique name for the set of SLAs.', max_length=128, unique=True,
+ verbose_name='Custom SLA Name')),
+ ('description', models.CharField(blank=True, max_length=512, null=True)),
+ ('critical', models.IntegerField(default=7, help_text='number of days to remediate a critical finding.',
+ verbose_name='Critical Finding SLA Days')),
+ ('high', models.IntegerField(default=30, help_text='number of days to remediate a high finding.',
+ verbose_name='High Finding SLA Days')),
+ ('medium', models.IntegerField(default=90, help_text='number of days to remediate a medium finding.',
+ verbose_name='Medium Finding SLA Days')),
+ ('low', models.IntegerField(default=120, help_text='number of days to remediate a low finding.',
+ verbose_name='Low Finding SLA Days')),
+ ],
+ options={
+ 'ordering': ['name'],
+ },
+ )
+ ]
diff --git a/dojo/db_migrations/0166_copy_sla_from_system_settings.py b/dojo/db_migrations/0166_copy_sla_from_system_settings.py
new file mode 100644
index 00000000000..04ca17cccbf
--- /dev/null
+++ b/dojo/db_migrations/0166_copy_sla_from_system_settings.py
@@ -0,0 +1,65 @@
+# Generated by Django 3.2.14 on 2022-07-28 13:11
+import logging
+
+import django.db.models.deletion
+
+from django.db import migrations, models
+
+logger = logging.getLogger(__name__)
+
+
+def save_existing_sla(apps, schema_editor):
+ system_settings_model = apps.get_model('dojo', 'System_Settings')
+
+ try:
+ system_settings = system_settings_model.objects.get()
+ critical = system_settings.sla_critical
+ high = system_settings.sla_high
+ medium = system_settings.sla_medium
+ low = system_settings.sla_low
+
+    except Exception:
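+        # A clean install has no System_Settings row yet; fall back to the
+        # historical default SLA windows.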
+ critical = 7
+ high = 30
+ medium = 90
+ low = 120
+
+ sla_config = apps.get_model('dojo', 'SLA_Configuration')
+ sla_config.objects.create(name='Default',
+ description='The Default SLA Configuration. Products not using an explicit SLA Configuration will use this one.',
+ critical=critical,
+ high=high,
+ medium=medium,
+ low=low)
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ('dojo', '0165_custom_sla'),
+ ]
+
+ operations = [
+ migrations.RunPython(save_existing_sla),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='sla_critical',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='sla_high',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='sla_low',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='sla_medium',
+ ),
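+        # default=1 assumes the 'Default' SLA_Configuration created by
+        # save_existing_sla above received primary key 1 (true on a fresh table).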
+ migrations.AddField(
+ model_name='product',
+ name='sla_configuration',
+ field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.RESTRICT, related_name='sla_config',
+ to='dojo.sla_configuration'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0167_system_settings_add_vulnerability_id_to_jira_label.py b/dojo/db_migrations/0167_system_settings_add_vulnerability_id_to_jira_label.py
new file mode 100644
index 00000000000..51dc27d218c
--- /dev/null
+++ b/dojo/db_migrations/0167_system_settings_add_vulnerability_id_to_jira_label.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.15 on 2022-08-10 12:34
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0166_copy_sla_from_system_settings'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='add_vulnerability_id_to_jira_label',
+ field=models.BooleanField(default=False, verbose_name='Add vulnerability Id as a JIRA label'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0168_alter_system_settings_time_zone.py b/dojo/db_migrations/0168_alter_system_settings_time_zone.py
new file mode 100644
index 00000000000..c8ddd275b3f
--- /dev/null
+++ b/dojo/db_migrations/0168_alter_system_settings_time_zone.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.15 on 2022-08-15 13:00
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0167_system_settings_add_vulnerability_id_to_jira_label'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='system_settings',
+ name='time_zone',
+ field=models.CharField(choices=[('Africa/Abidjan', 'Africa/Abidjan'), ('Africa/Accra', 'Africa/Accra'), ('Africa/Addis_Ababa', 'Africa/Addis_Ababa'), ('Africa/Algiers', 'Africa/Algiers'), ('Africa/Asmara', 'Africa/Asmara'), ('Africa/Asmera', 'Africa/Asmera'), ('Africa/Bamako', 'Africa/Bamako'), ('Africa/Bangui', 'Africa/Bangui'), ('Africa/Banjul', 'Africa/Banjul'), ('Africa/Bissau', 'Africa/Bissau'), ('Africa/Blantyre', 'Africa/Blantyre'), ('Africa/Brazzaville', 'Africa/Brazzaville'), ('Africa/Bujumbura', 'Africa/Bujumbura'), ('Africa/Cairo', 'Africa/Cairo'), ('Africa/Casablanca', 'Africa/Casablanca'), ('Africa/Ceuta', 'Africa/Ceuta'), ('Africa/Conakry', 'Africa/Conakry'), ('Africa/Dakar', 'Africa/Dakar'), ('Africa/Dar_es_Salaam', 'Africa/Dar_es_Salaam'), ('Africa/Djibouti', 'Africa/Djibouti'), ('Africa/Douala', 'Africa/Douala'), ('Africa/El_Aaiun', 'Africa/El_Aaiun'), ('Africa/Freetown', 'Africa/Freetown'), ('Africa/Gaborone', 'Africa/Gaborone'), ('Africa/Harare', 'Africa/Harare'), ('Africa/Johannesburg', 'Africa/Johannesburg'), ('Africa/Juba', 'Africa/Juba'), ('Africa/Kampala', 'Africa/Kampala'), ('Africa/Khartoum', 'Africa/Khartoum'), ('Africa/Kigali', 'Africa/Kigali'), ('Africa/Kinshasa', 'Africa/Kinshasa'), ('Africa/Lagos', 'Africa/Lagos'), ('Africa/Libreville', 'Africa/Libreville'), ('Africa/Lome', 'Africa/Lome'), ('Africa/Luanda', 'Africa/Luanda'), ('Africa/Lubumbashi', 'Africa/Lubumbashi'), ('Africa/Lusaka', 'Africa/Lusaka'), ('Africa/Malabo', 'Africa/Malabo'), ('Africa/Maputo', 'Africa/Maputo'), ('Africa/Maseru', 'Africa/Maseru'), ('Africa/Mbabane', 'Africa/Mbabane'), ('Africa/Mogadishu', 'Africa/Mogadishu'), ('Africa/Monrovia', 'Africa/Monrovia'), ('Africa/Nairobi', 'Africa/Nairobi'), ('Africa/Ndjamena', 'Africa/Ndjamena'), ('Africa/Niamey', 'Africa/Niamey'), ('Africa/Nouakchott', 'Africa/Nouakchott'), ('Africa/Ouagadougou', 'Africa/Ouagadougou'), ('Africa/Porto-Novo', 'Africa/Porto-Novo'), ('Africa/Sao_Tome', 'Africa/Sao_Tome'), ('Africa/Timbuktu', 'Africa/Timbuktu'), ('Africa/Tripoli', 'Africa/Tripoli'), ('Africa/Tunis', 'Africa/Tunis'), ('Africa/Windhoek', 'Africa/Windhoek'), ('America/Adak', 'America/Adak'), ('America/Anchorage', 'America/Anchorage'), ('America/Anguilla', 'America/Anguilla'), ('America/Antigua', 'America/Antigua'), ('America/Araguaina', 'America/Araguaina'), ('America/Argentina/Buenos_Aires', 'America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', 'America/Argentina/Catamarca'), ('America/Argentina/ComodRivadavia', 'America/Argentina/ComodRivadavia'), ('America/Argentina/Cordoba', 'America/Argentina/Cordoba'), ('America/Argentina/Jujuy', 'America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', 'America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', 'America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', 'America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', 'America/Argentina/Salta'), ('America/Argentina/San_Juan', 'America/Argentina/San_Juan'), ('America/Argentina/San_Luis', 'America/Argentina/San_Luis'), ('America/Argentina/Tucuman', 'America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', 'America/Argentina/Ushuaia'), ('America/Aruba', 'America/Aruba'), ('America/Asuncion', 'America/Asuncion'), ('America/Atikokan', 'America/Atikokan'), ('America/Atka', 'America/Atka'), ('America/Bahia', 'America/Bahia'), ('America/Bahia_Banderas', 'America/Bahia_Banderas'), ('America/Barbados', 'America/Barbados'), ('America/Belem', 'America/Belem'), ('America/Belize', 'America/Belize'), ('America/Blanc-Sablon', 
'America/Blanc-Sablon'), ('America/Boa_Vista', 'America/Boa_Vista'), ('America/Bogota', 'America/Bogota'), ('America/Boise', 'America/Boise'), ('America/Buenos_Aires', 'America/Buenos_Aires'), ('America/Cambridge_Bay', 'America/Cambridge_Bay'), ('America/Campo_Grande', 'America/Campo_Grande'), ('America/Cancun', 'America/Cancun'), ('America/Caracas', 'America/Caracas'), ('America/Catamarca', 'America/Catamarca'), ('America/Cayenne', 'America/Cayenne'), ('America/Cayman', 'America/Cayman'), ('America/Chicago', 'America/Chicago'), ('America/Chihuahua', 'America/Chihuahua'), ('America/Coral_Harbour', 'America/Coral_Harbour'), ('America/Cordoba', 'America/Cordoba'), ('America/Costa_Rica', 'America/Costa_Rica'), ('America/Creston', 'America/Creston'), ('America/Cuiaba', 'America/Cuiaba'), ('America/Curacao', 'America/Curacao'), ('America/Danmarkshavn', 'America/Danmarkshavn'), ('America/Dawson', 'America/Dawson'), ('America/Dawson_Creek', 'America/Dawson_Creek'), ('America/Denver', 'America/Denver'), ('America/Detroit', 'America/Detroit'), ('America/Dominica', 'America/Dominica'), ('America/Edmonton', 'America/Edmonton'), ('America/Eirunepe', 'America/Eirunepe'), ('America/El_Salvador', 'America/El_Salvador'), ('America/Ensenada', 'America/Ensenada'), ('America/Fort_Nelson', 'America/Fort_Nelson'), ('America/Fort_Wayne', 'America/Fort_Wayne'), ('America/Fortaleza', 'America/Fortaleza'), ('America/Glace_Bay', 'America/Glace_Bay'), ('America/Godthab', 'America/Godthab'), ('America/Goose_Bay', 'America/Goose_Bay'), ('America/Grand_Turk', 'America/Grand_Turk'), ('America/Grenada', 'America/Grenada'), ('America/Guadeloupe', 'America/Guadeloupe'), ('America/Guatemala', 'America/Guatemala'), ('America/Guayaquil', 'America/Guayaquil'), ('America/Guyana', 'America/Guyana'), ('America/Halifax', 'America/Halifax'), ('America/Havana', 'America/Havana'), ('America/Hermosillo', 'America/Hermosillo'), ('America/Indiana/Indianapolis', 'America/Indiana/Indianapolis'), ('America/Indiana/Knox', 'America/Indiana/Knox'), ('America/Indiana/Marengo', 'America/Indiana/Marengo'), ('America/Indiana/Petersburg', 'America/Indiana/Petersburg'), ('America/Indiana/Tell_City', 'America/Indiana/Tell_City'), ('America/Indiana/Vevay', 'America/Indiana/Vevay'), ('America/Indiana/Vincennes', 'America/Indiana/Vincennes'), ('America/Indiana/Winamac', 'America/Indiana/Winamac'), ('America/Indianapolis', 'America/Indianapolis'), ('America/Inuvik', 'America/Inuvik'), ('America/Iqaluit', 'America/Iqaluit'), ('America/Jamaica', 'America/Jamaica'), ('America/Jujuy', 'America/Jujuy'), ('America/Juneau', 'America/Juneau'), ('America/Kentucky/Louisville', 'America/Kentucky/Louisville'), ('America/Kentucky/Monticello', 'America/Kentucky/Monticello'), ('America/Knox_IN', 'America/Knox_IN'), ('America/Kralendijk', 'America/Kralendijk'), ('America/La_Paz', 'America/La_Paz'), ('America/Lima', 'America/Lima'), ('America/Los_Angeles', 'America/Los_Angeles'), ('America/Louisville', 'America/Louisville'), ('America/Lower_Princes', 'America/Lower_Princes'), ('America/Maceio', 'America/Maceio'), ('America/Managua', 'America/Managua'), ('America/Manaus', 'America/Manaus'), ('America/Marigot', 'America/Marigot'), ('America/Martinique', 'America/Martinique'), ('America/Matamoros', 'America/Matamoros'), ('America/Mazatlan', 'America/Mazatlan'), ('America/Mendoza', 'America/Mendoza'), ('America/Menominee', 'America/Menominee'), ('America/Merida', 'America/Merida'), ('America/Metlakatla', 'America/Metlakatla'), ('America/Mexico_City', 'America/Mexico_City'), 
('America/Miquelon', 'America/Miquelon'), ('America/Moncton', 'America/Moncton'), ('America/Monterrey', 'America/Monterrey'), ('America/Montevideo', 'America/Montevideo'), ('America/Montreal', 'America/Montreal'), ('America/Montserrat', 'America/Montserrat'), ('America/Nassau', 'America/Nassau'), ('America/New_York', 'America/New_York'), ('America/Nipigon', 'America/Nipigon'), ('America/Nome', 'America/Nome'), ('America/Noronha', 'America/Noronha'), ('America/North_Dakota/Beulah', 'America/North_Dakota/Beulah'), ('America/North_Dakota/Center', 'America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', 'America/North_Dakota/New_Salem'), ('America/Nuuk', 'America/Nuuk'), ('America/Ojinaga', 'America/Ojinaga'), ('America/Panama', 'America/Panama'), ('America/Pangnirtung', 'America/Pangnirtung'), ('America/Paramaribo', 'America/Paramaribo'), ('America/Phoenix', 'America/Phoenix'), ('America/Port-au-Prince', 'America/Port-au-Prince'), ('America/Port_of_Spain', 'America/Port_of_Spain'), ('America/Porto_Acre', 'America/Porto_Acre'), ('America/Porto_Velho', 'America/Porto_Velho'), ('America/Puerto_Rico', 'America/Puerto_Rico'), ('America/Punta_Arenas', 'America/Punta_Arenas'), ('America/Rainy_River', 'America/Rainy_River'), ('America/Rankin_Inlet', 'America/Rankin_Inlet'), ('America/Recife', 'America/Recife'), ('America/Regina', 'America/Regina'), ('America/Resolute', 'America/Resolute'), ('America/Rio_Branco', 'America/Rio_Branco'), ('America/Rosario', 'America/Rosario'), ('America/Santa_Isabel', 'America/Santa_Isabel'), ('America/Santarem', 'America/Santarem'), ('America/Santiago', 'America/Santiago'), ('America/Santo_Domingo', 'America/Santo_Domingo'), ('America/Sao_Paulo', 'America/Sao_Paulo'), ('America/Scoresbysund', 'America/Scoresbysund'), ('America/Shiprock', 'America/Shiprock'), ('America/Sitka', 'America/Sitka'), ('America/St_Barthelemy', 'America/St_Barthelemy'), ('America/St_Johns', 'America/St_Johns'), ('America/St_Kitts', 'America/St_Kitts'), ('America/St_Lucia', 'America/St_Lucia'), ('America/St_Thomas', 'America/St_Thomas'), ('America/St_Vincent', 'America/St_Vincent'), ('America/Swift_Current', 'America/Swift_Current'), ('America/Tegucigalpa', 'America/Tegucigalpa'), ('America/Thule', 'America/Thule'), ('America/Thunder_Bay', 'America/Thunder_Bay'), ('America/Tijuana', 'America/Tijuana'), ('America/Toronto', 'America/Toronto'), ('America/Tortola', 'America/Tortola'), ('America/Vancouver', 'America/Vancouver'), ('America/Virgin', 'America/Virgin'), ('America/Whitehorse', 'America/Whitehorse'), ('America/Winnipeg', 'America/Winnipeg'), ('America/Yakutat', 'America/Yakutat'), ('America/Yellowknife', 'America/Yellowknife'), ('Antarctica/Casey', 'Antarctica/Casey'), ('Antarctica/Davis', 'Antarctica/Davis'), ('Antarctica/DumontDUrville', 'Antarctica/DumontDUrville'), ('Antarctica/Macquarie', 'Antarctica/Macquarie'), ('Antarctica/Mawson', 'Antarctica/Mawson'), ('Antarctica/McMurdo', 'Antarctica/McMurdo'), ('Antarctica/Palmer', 'Antarctica/Palmer'), ('Antarctica/Rothera', 'Antarctica/Rothera'), ('Antarctica/South_Pole', 'Antarctica/South_Pole'), ('Antarctica/Syowa', 'Antarctica/Syowa'), ('Antarctica/Troll', 'Antarctica/Troll'), ('Antarctica/Vostok', 'Antarctica/Vostok'), ('Arctic/Longyearbyen', 'Arctic/Longyearbyen'), ('Asia/Aden', 'Asia/Aden'), ('Asia/Almaty', 'Asia/Almaty'), ('Asia/Amman', 'Asia/Amman'), ('Asia/Anadyr', 'Asia/Anadyr'), ('Asia/Aqtau', 'Asia/Aqtau'), ('Asia/Aqtobe', 'Asia/Aqtobe'), ('Asia/Ashgabat', 'Asia/Ashgabat'), ('Asia/Ashkhabad', 'Asia/Ashkhabad'), 
('Asia/Atyrau', 'Asia/Atyrau'), ('Asia/Baghdad', 'Asia/Baghdad'), ('Asia/Bahrain', 'Asia/Bahrain'), ('Asia/Baku', 'Asia/Baku'), ('Asia/Bangkok', 'Asia/Bangkok'), ('Asia/Barnaul', 'Asia/Barnaul'), ('Asia/Beirut', 'Asia/Beirut'), ('Asia/Bishkek', 'Asia/Bishkek'), ('Asia/Brunei', 'Asia/Brunei'), ('Asia/Calcutta', 'Asia/Calcutta'), ('Asia/Chita', 'Asia/Chita'), ('Asia/Choibalsan', 'Asia/Choibalsan'), ('Asia/Chongqing', 'Asia/Chongqing'), ('Asia/Chungking', 'Asia/Chungking'), ('Asia/Colombo', 'Asia/Colombo'), ('Asia/Dacca', 'Asia/Dacca'), ('Asia/Damascus', 'Asia/Damascus'), ('Asia/Dhaka', 'Asia/Dhaka'), ('Asia/Dili', 'Asia/Dili'), ('Asia/Dubai', 'Asia/Dubai'), ('Asia/Dushanbe', 'Asia/Dushanbe'), ('Asia/Famagusta', 'Asia/Famagusta'), ('Asia/Gaza', 'Asia/Gaza'), ('Asia/Harbin', 'Asia/Harbin'), ('Asia/Hebron', 'Asia/Hebron'), ('Asia/Ho_Chi_Minh', 'Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', 'Asia/Hong_Kong'), ('Asia/Hovd', 'Asia/Hovd'), ('Asia/Irkutsk', 'Asia/Irkutsk'), ('Asia/Istanbul', 'Asia/Istanbul'), ('Asia/Jakarta', 'Asia/Jakarta'), ('Asia/Jayapura', 'Asia/Jayapura'), ('Asia/Jerusalem', 'Asia/Jerusalem'), ('Asia/Kabul', 'Asia/Kabul'), ('Asia/Kamchatka', 'Asia/Kamchatka'), ('Asia/Karachi', 'Asia/Karachi'), ('Asia/Kashgar', 'Asia/Kashgar'), ('Asia/Kathmandu', 'Asia/Kathmandu'), ('Asia/Katmandu', 'Asia/Katmandu'), ('Asia/Khandyga', 'Asia/Khandyga'), ('Asia/Kolkata', 'Asia/Kolkata'), ('Asia/Krasnoyarsk', 'Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', 'Asia/Kuala_Lumpur'), ('Asia/Kuching', 'Asia/Kuching'), ('Asia/Kuwait', 'Asia/Kuwait'), ('Asia/Macao', 'Asia/Macao'), ('Asia/Macau', 'Asia/Macau'), ('Asia/Magadan', 'Asia/Magadan'), ('Asia/Makassar', 'Asia/Makassar'), ('Asia/Manila', 'Asia/Manila'), ('Asia/Muscat', 'Asia/Muscat'), ('Asia/Nicosia', 'Asia/Nicosia'), ('Asia/Novokuznetsk', 'Asia/Novokuznetsk'), ('Asia/Novosibirsk', 'Asia/Novosibirsk'), ('Asia/Omsk', 'Asia/Omsk'), ('Asia/Oral', 'Asia/Oral'), ('Asia/Phnom_Penh', 'Asia/Phnom_Penh'), ('Asia/Pontianak', 'Asia/Pontianak'), ('Asia/Pyongyang', 'Asia/Pyongyang'), ('Asia/Qatar', 'Asia/Qatar'), ('Asia/Qostanay', 'Asia/Qostanay'), ('Asia/Qyzylorda', 'Asia/Qyzylorda'), ('Asia/Rangoon', 'Asia/Rangoon'), ('Asia/Riyadh', 'Asia/Riyadh'), ('Asia/Saigon', 'Asia/Saigon'), ('Asia/Sakhalin', 'Asia/Sakhalin'), ('Asia/Samarkand', 'Asia/Samarkand'), ('Asia/Seoul', 'Asia/Seoul'), ('Asia/Shanghai', 'Asia/Shanghai'), ('Asia/Singapore', 'Asia/Singapore'), ('Asia/Srednekolymsk', 'Asia/Srednekolymsk'), ('Asia/Taipei', 'Asia/Taipei'), ('Asia/Tashkent', 'Asia/Tashkent'), ('Asia/Tbilisi', 'Asia/Tbilisi'), ('Asia/Tehran', 'Asia/Tehran'), ('Asia/Tel_Aviv', 'Asia/Tel_Aviv'), ('Asia/Thimbu', 'Asia/Thimbu'), ('Asia/Thimphu', 'Asia/Thimphu'), ('Asia/Tokyo', 'Asia/Tokyo'), ('Asia/Tomsk', 'Asia/Tomsk'), ('Asia/Ujung_Pandang', 'Asia/Ujung_Pandang'), ('Asia/Ulaanbaatar', 'Asia/Ulaanbaatar'), ('Asia/Ulan_Bator', 'Asia/Ulan_Bator'), ('Asia/Urumqi', 'Asia/Urumqi'), ('Asia/Ust-Nera', 'Asia/Ust-Nera'), ('Asia/Vientiane', 'Asia/Vientiane'), ('Asia/Vladivostok', 'Asia/Vladivostok'), ('Asia/Yakutsk', 'Asia/Yakutsk'), ('Asia/Yangon', 'Asia/Yangon'), ('Asia/Yekaterinburg', 'Asia/Yekaterinburg'), ('Asia/Yerevan', 'Asia/Yerevan'), ('Atlantic/Azores', 'Atlantic/Azores'), ('Atlantic/Bermuda', 'Atlantic/Bermuda'), ('Atlantic/Canary', 'Atlantic/Canary'), ('Atlantic/Cape_Verde', 'Atlantic/Cape_Verde'), ('Atlantic/Faeroe', 'Atlantic/Faeroe'), ('Atlantic/Faroe', 'Atlantic/Faroe'), ('Atlantic/Jan_Mayen', 'Atlantic/Jan_Mayen'), ('Atlantic/Madeira', 'Atlantic/Madeira'), ('Atlantic/Reykjavik', 
'Atlantic/Reykjavik'), ('Atlantic/South_Georgia', 'Atlantic/South_Georgia'), ('Atlantic/St_Helena', 'Atlantic/St_Helena'), ('Atlantic/Stanley', 'Atlantic/Stanley'), ('Australia/ACT', 'Australia/ACT'), ('Australia/Adelaide', 'Australia/Adelaide'), ('Australia/Brisbane', 'Australia/Brisbane'), ('Australia/Broken_Hill', 'Australia/Broken_Hill'), ('Australia/Canberra', 'Australia/Canberra'), ('Australia/Currie', 'Australia/Currie'), ('Australia/Darwin', 'Australia/Darwin'), ('Australia/Eucla', 'Australia/Eucla'), ('Australia/Hobart', 'Australia/Hobart'), ('Australia/LHI', 'Australia/LHI'), ('Australia/Lindeman', 'Australia/Lindeman'), ('Australia/Lord_Howe', 'Australia/Lord_Howe'), ('Australia/Melbourne', 'Australia/Melbourne'), ('Australia/NSW', 'Australia/NSW'), ('Australia/North', 'Australia/North'), ('Australia/Perth', 'Australia/Perth'), ('Australia/Queensland', 'Australia/Queensland'), ('Australia/South', 'Australia/South'), ('Australia/Sydney', 'Australia/Sydney'), ('Australia/Tasmania', 'Australia/Tasmania'), ('Australia/Victoria', 'Australia/Victoria'), ('Australia/West', 'Australia/West'), ('Australia/Yancowinna', 'Australia/Yancowinna'), ('Brazil/Acre', 'Brazil/Acre'), ('Brazil/DeNoronha', 'Brazil/DeNoronha'), ('Brazil/East', 'Brazil/East'), ('Brazil/West', 'Brazil/West'), ('CET', 'CET'), ('CST6CDT', 'CST6CDT'), ('Canada/Atlantic', 'Canada/Atlantic'), ('Canada/Central', 'Canada/Central'), ('Canada/Eastern', 'Canada/Eastern'), ('Canada/Mountain', 'Canada/Mountain'), ('Canada/Newfoundland', 'Canada/Newfoundland'), ('Canada/Pacific', 'Canada/Pacific'), ('Canada/Saskatchewan', 'Canada/Saskatchewan'), ('Canada/Yukon', 'Canada/Yukon'), ('Chile/Continental', 'Chile/Continental'), ('Chile/EasterIsland', 'Chile/EasterIsland'), ('Cuba', 'Cuba'), ('EET', 'EET'), ('EST', 'EST'), ('EST5EDT', 'EST5EDT'), ('Egypt', 'Egypt'), ('Eire', 'Eire'), ('Etc/GMT', 'Etc/GMT'), ('Etc/GMT+0', 'Etc/GMT+0'), ('Etc/GMT+1', 'Etc/GMT+1'), ('Etc/GMT+10', 'Etc/GMT+10'), ('Etc/GMT+11', 'Etc/GMT+11'), ('Etc/GMT+12', 'Etc/GMT+12'), ('Etc/GMT+2', 'Etc/GMT+2'), ('Etc/GMT+3', 'Etc/GMT+3'), ('Etc/GMT+4', 'Etc/GMT+4'), ('Etc/GMT+5', 'Etc/GMT+5'), ('Etc/GMT+6', 'Etc/GMT+6'), ('Etc/GMT+7', 'Etc/GMT+7'), ('Etc/GMT+8', 'Etc/GMT+8'), ('Etc/GMT+9', 'Etc/GMT+9'), ('Etc/GMT-0', 'Etc/GMT-0'), ('Etc/GMT-1', 'Etc/GMT-1'), ('Etc/GMT-10', 'Etc/GMT-10'), ('Etc/GMT-11', 'Etc/GMT-11'), ('Etc/GMT-12', 'Etc/GMT-12'), ('Etc/GMT-13', 'Etc/GMT-13'), ('Etc/GMT-14', 'Etc/GMT-14'), ('Etc/GMT-2', 'Etc/GMT-2'), ('Etc/GMT-3', 'Etc/GMT-3'), ('Etc/GMT-4', 'Etc/GMT-4'), ('Etc/GMT-5', 'Etc/GMT-5'), ('Etc/GMT-6', 'Etc/GMT-6'), ('Etc/GMT-7', 'Etc/GMT-7'), ('Etc/GMT-8', 'Etc/GMT-8'), ('Etc/GMT-9', 'Etc/GMT-9'), ('Etc/GMT0', 'Etc/GMT0'), ('Etc/Greenwich', 'Etc/Greenwich'), ('Etc/UCT', 'Etc/UCT'), ('Etc/UTC', 'Etc/UTC'), ('Etc/Universal', 'Etc/Universal'), ('Etc/Zulu', 'Etc/Zulu'), ('Europe/Amsterdam', 'Europe/Amsterdam'), ('Europe/Andorra', 'Europe/Andorra'), ('Europe/Astrakhan', 'Europe/Astrakhan'), ('Europe/Athens', 'Europe/Athens'), ('Europe/Belfast', 'Europe/Belfast'), ('Europe/Belgrade', 'Europe/Belgrade'), ('Europe/Berlin', 'Europe/Berlin'), ('Europe/Bratislava', 'Europe/Bratislava'), ('Europe/Brussels', 'Europe/Brussels'), ('Europe/Bucharest', 'Europe/Bucharest'), ('Europe/Budapest', 'Europe/Budapest'), ('Europe/Busingen', 'Europe/Busingen'), ('Europe/Chisinau', 'Europe/Chisinau'), ('Europe/Copenhagen', 'Europe/Copenhagen'), ('Europe/Dublin', 'Europe/Dublin'), ('Europe/Gibraltar', 'Europe/Gibraltar'), ('Europe/Guernsey', 'Europe/Guernsey'), 
('Europe/Helsinki', 'Europe/Helsinki'), ('Europe/Isle_of_Man', 'Europe/Isle_of_Man'), ('Europe/Istanbul', 'Europe/Istanbul'), ('Europe/Jersey', 'Europe/Jersey'), ('Europe/Kaliningrad', 'Europe/Kaliningrad'), ('Europe/Kiev', 'Europe/Kiev'), ('Europe/Kirov', 'Europe/Kirov'), ('Europe/Kyiv', 'Europe/Kyiv'), ('Europe/Lisbon', 'Europe/Lisbon'), ('Europe/Ljubljana', 'Europe/Ljubljana'), ('Europe/London', 'Europe/London'), ('Europe/Luxembourg', 'Europe/Luxembourg'), ('Europe/Madrid', 'Europe/Madrid'), ('Europe/Malta', 'Europe/Malta'), ('Europe/Mariehamn', 'Europe/Mariehamn'), ('Europe/Minsk', 'Europe/Minsk'), ('Europe/Monaco', 'Europe/Monaco'), ('Europe/Moscow', 'Europe/Moscow'), ('Europe/Nicosia', 'Europe/Nicosia'), ('Europe/Oslo', 'Europe/Oslo'), ('Europe/Paris', 'Europe/Paris'), ('Europe/Podgorica', 'Europe/Podgorica'), ('Europe/Prague', 'Europe/Prague'), ('Europe/Riga', 'Europe/Riga'), ('Europe/Rome', 'Europe/Rome'), ('Europe/Samara', 'Europe/Samara'), ('Europe/San_Marino', 'Europe/San_Marino'), ('Europe/Sarajevo', 'Europe/Sarajevo'), ('Europe/Saratov', 'Europe/Saratov'), ('Europe/Simferopol', 'Europe/Simferopol'), ('Europe/Skopje', 'Europe/Skopje'), ('Europe/Sofia', 'Europe/Sofia'), ('Europe/Stockholm', 'Europe/Stockholm'), ('Europe/Tallinn', 'Europe/Tallinn'), ('Europe/Tirane', 'Europe/Tirane'), ('Europe/Tiraspol', 'Europe/Tiraspol'), ('Europe/Ulyanovsk', 'Europe/Ulyanovsk'), ('Europe/Uzhgorod', 'Europe/Uzhgorod'), ('Europe/Vaduz', 'Europe/Vaduz'), ('Europe/Vatican', 'Europe/Vatican'), ('Europe/Vienna', 'Europe/Vienna'), ('Europe/Vilnius', 'Europe/Vilnius'), ('Europe/Volgograd', 'Europe/Volgograd'), ('Europe/Warsaw', 'Europe/Warsaw'), ('Europe/Zagreb', 'Europe/Zagreb'), ('Europe/Zaporozhye', 'Europe/Zaporozhye'), ('Europe/Zurich', 'Europe/Zurich'), ('GB', 'GB'), ('GB-Eire', 'GB-Eire'), ('GMT', 'GMT'), ('GMT+0', 'GMT+0'), ('GMT-0', 'GMT-0'), ('GMT0', 'GMT0'), ('Greenwich', 'Greenwich'), ('HST', 'HST'), ('Hongkong', 'Hongkong'), ('Iceland', 'Iceland'), ('Indian/Antananarivo', 'Indian/Antananarivo'), ('Indian/Chagos', 'Indian/Chagos'), ('Indian/Christmas', 'Indian/Christmas'), ('Indian/Cocos', 'Indian/Cocos'), ('Indian/Comoro', 'Indian/Comoro'), ('Indian/Kerguelen', 'Indian/Kerguelen'), ('Indian/Mahe', 'Indian/Mahe'), ('Indian/Maldives', 'Indian/Maldives'), ('Indian/Mauritius', 'Indian/Mauritius'), ('Indian/Mayotte', 'Indian/Mayotte'), ('Indian/Reunion', 'Indian/Reunion'), ('Iran', 'Iran'), ('Israel', 'Israel'), ('Jamaica', 'Jamaica'), ('Japan', 'Japan'), ('Kwajalein', 'Kwajalein'), ('Libya', 'Libya'), ('MET', 'MET'), ('MST', 'MST'), ('MST7MDT', 'MST7MDT'), ('Mexico/BajaNorte', 'Mexico/BajaNorte'), ('Mexico/BajaSur', 'Mexico/BajaSur'), ('Mexico/General', 'Mexico/General'), ('NZ', 'NZ'), ('NZ-CHAT', 'NZ-CHAT'), ('Navajo', 'Navajo'), ('PRC', 'PRC'), ('PST8PDT', 'PST8PDT'), ('Pacific/Apia', 'Pacific/Apia'), ('Pacific/Auckland', 'Pacific/Auckland'), ('Pacific/Bougainville', 'Pacific/Bougainville'), ('Pacific/Chatham', 'Pacific/Chatham'), ('Pacific/Chuuk', 'Pacific/Chuuk'), ('Pacific/Easter', 'Pacific/Easter'), ('Pacific/Efate', 'Pacific/Efate'), ('Pacific/Enderbury', 'Pacific/Enderbury'), ('Pacific/Fakaofo', 'Pacific/Fakaofo'), ('Pacific/Fiji', 'Pacific/Fiji'), ('Pacific/Funafuti', 'Pacific/Funafuti'), ('Pacific/Galapagos', 'Pacific/Galapagos'), ('Pacific/Gambier', 'Pacific/Gambier'), ('Pacific/Guadalcanal', 'Pacific/Guadalcanal'), ('Pacific/Guam', 'Pacific/Guam'), ('Pacific/Honolulu', 'Pacific/Honolulu'), ('Pacific/Johnston', 'Pacific/Johnston'), ('Pacific/Kanton', 'Pacific/Kanton'), 
('Pacific/Kiritimati', 'Pacific/Kiritimati'), ('Pacific/Kosrae', 'Pacific/Kosrae'), ('Pacific/Kwajalein', 'Pacific/Kwajalein'), ('Pacific/Majuro', 'Pacific/Majuro'), ('Pacific/Marquesas', 'Pacific/Marquesas'), ('Pacific/Midway', 'Pacific/Midway'), ('Pacific/Nauru', 'Pacific/Nauru'), ('Pacific/Niue', 'Pacific/Niue'), ('Pacific/Norfolk', 'Pacific/Norfolk'), ('Pacific/Noumea', 'Pacific/Noumea'), ('Pacific/Pago_Pago', 'Pacific/Pago_Pago'), ('Pacific/Palau', 'Pacific/Palau'), ('Pacific/Pitcairn', 'Pacific/Pitcairn'), ('Pacific/Pohnpei', 'Pacific/Pohnpei'), ('Pacific/Ponape', 'Pacific/Ponape'), ('Pacific/Port_Moresby', 'Pacific/Port_Moresby'), ('Pacific/Rarotonga', 'Pacific/Rarotonga'), ('Pacific/Saipan', 'Pacific/Saipan'), ('Pacific/Samoa', 'Pacific/Samoa'), ('Pacific/Tahiti', 'Pacific/Tahiti'), ('Pacific/Tarawa', 'Pacific/Tarawa'), ('Pacific/Tongatapu', 'Pacific/Tongatapu'), ('Pacific/Truk', 'Pacific/Truk'), ('Pacific/Wake', 'Pacific/Wake'), ('Pacific/Wallis', 'Pacific/Wallis'), ('Pacific/Yap', 'Pacific/Yap'), ('Poland', 'Poland'), ('Portugal', 'Portugal'), ('ROC', 'ROC'), ('ROK', 'ROK'), ('Singapore', 'Singapore'), ('Turkey', 'Turkey'), ('UCT', 'UCT'), ('US/Alaska', 'US/Alaska'), ('US/Aleutian', 'US/Aleutian'), ('US/Arizona', 'US/Arizona'), ('US/Central', 'US/Central'), ('US/East-Indiana', 'US/East-Indiana'), ('US/Eastern', 'US/Eastern'), ('US/Hawaii', 'US/Hawaii'), ('US/Indiana-Starke', 'US/Indiana-Starke'), ('US/Michigan', 'US/Michigan'), ('US/Mountain', 'US/Mountain'), ('US/Pacific', 'US/Pacific'), ('US/Samoa', 'US/Samoa'), ('UTC', 'UTC'), ('Universal', 'Universal'), ('W-SU', 'W-SU'), ('WET', 'WET'), ('Zulu', 'Zulu')], default='UTC', max_length=50),
+ ),
+ ]
diff --git a/dojo/db_migrations/0169_planned_remediation_date.py b/dojo/db_migrations/0169_planned_remediation_date.py
new file mode 100644
index 00000000000..1da9721d33f
--- /dev/null
+++ b/dojo/db_migrations/0169_planned_remediation_date.py
@@ -0,0 +1,33 @@
+# Generated by Django 3.2.13 on 2022-05-23 19:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0168_alter_system_settings_time_zone'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='finding',
+ name='planned_remediation_date',
+ field=models.DateField(help_text='The date the flaw is expected to be remediated.', null=True, verbose_name='Planned Remediation Date'),
+ ),
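+        # The Rule/Child_Rule choice lists are regenerated so the new
+        # 'planned_remediation_date' field appears as a matchable finding field.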
+ migrations.AlterField(
+ model_name='child_rule',
+ name='match_field',
+ field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_mitigated', 'is_mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date'), ('service', 'service'), ('planned_remediation_date', 'planned_remediation_date')], max_length=200),
+ ),
+ migrations.AlterField(
+ model_name='rule',
+ name='applied_field',
+ field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_mitigated', 'is_mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date'), ('service', 'service'), ('planned_remediation_date', 'planned_remediation_date')], max_length=200),
+ ),
+ migrations.AlterField(
+ model_name='rule',
+ name='match_field',
+ field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_mitigated', 'is_mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date'), ('service', 'service'), ('planned_remediation_date', 'planned_remediation_date')], max_length=200),
+ ),
+ ]
diff --git a/dojo/db_migrations/0170_jira_project_custom_fields.py b/dojo/db_migrations/0170_jira_project_custom_fields.py
new file mode 100644
index 00000000000..a05c858cd41
--- /dev/null
+++ b/dojo/db_migrations/0170_jira_project_custom_fields.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.15 on 2022-08-29 12:01
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0169_planned_remediation_date'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='jira_project',
+ name='custom_fields',
+ field=models.JSONField(blank=True, help_text='JIRA custom field JSON mapping of Id to value, e.g. {"customfield_10122": [{"name": "8.0.1"}]}', max_length=200, null=True),
+ ),
+ ]
diff --git a/dojo/db_migrations/0171_jira_labels_per_product_and_engagement.py b/dojo/db_migrations/0171_jira_labels_per_product_and_engagement.py
new file mode 100644
index 00000000000..585f80a8143
--- /dev/null
+++ b/dojo/db_migrations/0171_jira_labels_per_product_and_engagement.py
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.15 on 2022-08-16 13:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0170_jira_project_custom_fields'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='jira_project',
+ name='add_vulnerability_id_to_jira_label',
+ field=models.BooleanField(default=False, verbose_name='Add vulnerability Id as a JIRA label'),
+ ),
+ migrations.AddField(
+ model_name='jira_project',
+ name='jira_labels',
+            field=models.CharField(blank=True, help_text='JIRA issue labels, space separated', max_length=200, null=True),
+ ),
+ ]
diff --git a/dojo/db_migrations/0172_optimize_usage_of_endpoint_status.py b/dojo/db_migrations/0172_optimize_usage_of_endpoint_status.py
new file mode 100644
index 00000000000..b6e3474117d
--- /dev/null
+++ b/dojo/db_migrations/0172_optimize_usage_of_endpoint_status.py
@@ -0,0 +1,50 @@
+# Generated by Django 3.2.13 on 2022-04-26 20:44
+
+from django.db import migrations, models
+import dojo.models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0171_jira_labels_per_product_and_engagement'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='endpoint',
+ name='endpoint_status',
+ ),
+ migrations.RemoveField(
+ model_name='finding',
+ name='endpoint_status',
+ ),
+        # Yes, we can simply remove the field and add it again, because the data in "Endpoint_Status" is redundant - there is no data loss; this was tested.
+        # AlterField is not usable here because of: ValueError: Cannot alter field xxx into yyy - they are not compatible types (you cannot alter to or from M2M fields, or add or remove through= on M2M fields)
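+        # Re-adding the M2M with through='dojo.Endpoint_Status' lets Django reuse
+        # the existing status table instead of creating a new join table.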
+ migrations.RemoveField(
+ model_name='finding',
+ name='endpoints',
+ ),
+ migrations.AddField(
+ model_name='finding',
+ name='endpoints',
+ field=models.ManyToManyField(blank=True,
+                                         help_text='The hosts within the product that are susceptible to this flaw, together with the status of each endpoint for this flaw (Vulnerable, Mitigated, ...).',
+ through='dojo.Endpoint_Status',
+ to='dojo.Endpoint',
+ verbose_name='Endpoints'),
+ ),
+ migrations.AddField(
+ model_name='endpoint',
+ name='findings',
+ field=models.ManyToManyField(blank=True,
+ through='dojo.Endpoint_Status',
+ to='dojo.Finding',
+ verbose_name='Findings'),
+ ),
+ migrations.AlterField(
+ model_name='endpoint_status',
+ name='date',
+ field=models.DateField(default=dojo.models.get_current_date),
+ ),
+ ]
diff --git a/dojo/db_migrations/0173_alter_risk_acceptance_name.py b/dojo/db_migrations/0173_alter_risk_acceptance_name.py
new file mode 100644
index 00000000000..bb39f0114af
--- /dev/null
+++ b/dojo/db_migrations/0173_alter_risk_acceptance_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2022-10-28 13:12
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0172_optimize_usage_of_endpoint_status'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='risk_acceptance',
+ name='name',
+ field=models.CharField(help_text='Descriptive name which in the future may also be used to group risk acceptances together across engagements and products', max_length=300),
+ ),
+ ]
diff --git a/dojo/db_migrations/0174_jira_project_default_assignee.py b/dojo/db_migrations/0174_jira_project_default_assignee.py
new file mode 100644
index 00000000000..67b80a878eb
--- /dev/null
+++ b/dojo/db_migrations/0174_jira_project_default_assignee.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2022-11-23 12:49
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0173_alter_risk_acceptance_name'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='jira_project',
+ name='default_assignee',
+ field=models.CharField(blank=True, help_text='JIRA default assignee (name). If left blank then it defaults to whatever is configured in JIRA.', max_length=200, null=True),
+ ),
+ ]
diff --git a/dojo/db_migrations/0175_system_settings_enable_notify_sla.py b/dojo/db_migrations/0175_system_settings_enable_notify_sla.py
new file mode 100644
index 00000000000..2c10d587ef9
--- /dev/null
+++ b/dojo/db_migrations/0175_system_settings_enable_notify_sla.py
@@ -0,0 +1,69 @@
+# Generated by Django 3.2.16 on 2022-11-19 22:51
+
+from django.db import migrations, models
+from django.conf import settings
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def migrate_notify_sla_from_settings_file(apps, schema_editor):
+
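+    # System_Settings is effectively a singleton; objects.get() raises on a
+    # clean install (no row yet), which each block below deliberately ignores.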
+ if hasattr(settings, 'SLA_NOTIFY_ACTIVE'):
+ system_settings_model = apps.get_model('dojo', 'System_Settings')
+ logger.info('Migrating value from SLA_NOTIFY_ACTIVE into system settings model')
+ try:
+ system_setting = system_settings_model.objects.get()
+ system_setting.enable_notify_sla_active = settings.SLA_NOTIFY_ACTIVE
+ system_setting.save()
+        except Exception:
+ # for a clean installation there is no system_settings record, so just ignore it
+ pass
+
+ if hasattr(settings, 'SLA_NOTIFY_ACTIVE_VERIFIED_ONLY'):
+ system_settings_model = apps.get_model('dojo', 'System_Settings')
+ logger.info('Migrating value from SLA_NOTIFY_ACTIVE_VERIFIED_ONLY into system settings model')
+ try:
+ system_setting = system_settings_model.objects.get()
+ system_setting.enable_notify_sla_active_verified = settings.SLA_NOTIFY_ACTIVE_VERIFIED_ONLY
+ system_setting.save()
+        except Exception:
+ # for a clean installation there is no system_settings record, so just ignore it
+ pass
+
+ if hasattr(settings, 'SLA_NOTIFY_WITH_JIRA_ONLY'):
+ system_settings_model = apps.get_model('dojo', 'System_Settings')
+ logger.info('Migrating value from SLA_NOTIFY_WITH_JIRA_ONLY into system settings model')
+ try:
+ system_setting = system_settings_model.objects.get()
+ system_setting.enable_notify_sla_jira_only = settings.SLA_NOTIFY_WITH_JIRA_ONLY
+ system_setting.save()
+        except Exception:
+ # for a clean installation there is no system_settings record, so just ignore it
+ pass
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0174_jira_project_default_assignee'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_notify_sla_active',
+            field=models.BooleanField(default=False, help_text="Enables notifications when the time to remediate according to Finding SLAs is breached for active Findings.", verbose_name="Enable Notify SLA Breach for active Findings"),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_notify_sla_active_verified',
+            field=models.BooleanField(default=False, help_text="Enables notifications when the time to remediate according to Finding SLAs is breached for active, verified Findings.", verbose_name="Enable Notify SLA Breach for active, verified Findings"),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_notify_sla_jira_only',
+            field=models.BooleanField(default=False, help_text="Enables notifications when the time to remediate according to Finding SLAs is breached for Findings that are linked to JIRA issues.", verbose_name="Enable Notify SLA Breach for Findings linked to JIRA"),
+ ),
+ migrations.RunPython(migrate_notify_sla_from_settings_file),
+ ]
diff --git a/dojo/db_migrations/0176_custom_password_requirements.py b/dojo/db_migrations/0176_custom_password_requirements.py
new file mode 100644
index 00000000000..112f751a174
--- /dev/null
+++ b/dojo/db_migrations/0176_custom_password_requirements.py
@@ -0,0 +1,43 @@
+# Generated by Django 3.2.16 on 2022-11-27 21:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0175_system_settings_enable_notify_sla'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='lowercase_character_required',
+ field=models.BooleanField(default=True, help_text='Requires user passwords to contain at least one lowercase letter (a-z).', verbose_name='Password must contain one lowercase letter'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='maximum_password_length',
+            field=models.IntegerField(default=48, help_text='Requires users to set passwords shorter than the maximum length.', verbose_name='Maximum password length'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='minimum_password_length',
+            field=models.IntegerField(default=9, help_text='Requires users to set passwords longer than the minimum length.', verbose_name='Minimum password length'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='number_character_required',
+ field=models.BooleanField(default=True, help_text='Requires user passwords to contain at least one digit (0-9).', verbose_name='Password must contain one digit'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='special_character_required',
+ field=models.BooleanField(default=True, help_text='Requires user passwords to contain at least one special character (()[]{}|\\`~!@#$%^&*_-+=;:\'",<>./?).', verbose_name='Password must contain one special character'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='uppercase_character_required',
+ field=models.BooleanField(default=True, help_text='Requires user passwords to contain at least one uppercase letter (A-Z).', verbose_name='Password must contain one uppercase letter'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0177_alter_system_settings_time_zone.py b/dojo/db_migrations/0177_alter_system_settings_time_zone.py
new file mode 100644
index 00000000000..9b50ff804b3
--- /dev/null
+++ b/dojo/db_migrations/0177_alter_system_settings_time_zone.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2022-12-20 04:39
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0176_custom_password_requirements'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='system_settings',
+ name='time_zone',
+ field=models.CharField(choices=[('Africa/Abidjan', 'Africa/Abidjan'), ('Africa/Accra', 'Africa/Accra'), ('Africa/Addis_Ababa', 'Africa/Addis_Ababa'), ('Africa/Algiers', 'Africa/Algiers'), ('Africa/Asmara', 'Africa/Asmara'), ('Africa/Asmera', 'Africa/Asmera'), ('Africa/Bamako', 'Africa/Bamako'), ('Africa/Bangui', 'Africa/Bangui'), ('Africa/Banjul', 'Africa/Banjul'), ('Africa/Bissau', 'Africa/Bissau'), ('Africa/Blantyre', 'Africa/Blantyre'), ('Africa/Brazzaville', 'Africa/Brazzaville'), ('Africa/Bujumbura', 'Africa/Bujumbura'), ('Africa/Cairo', 'Africa/Cairo'), ('Africa/Casablanca', 'Africa/Casablanca'), ('Africa/Ceuta', 'Africa/Ceuta'), ('Africa/Conakry', 'Africa/Conakry'), ('Africa/Dakar', 'Africa/Dakar'), ('Africa/Dar_es_Salaam', 'Africa/Dar_es_Salaam'), ('Africa/Djibouti', 'Africa/Djibouti'), ('Africa/Douala', 'Africa/Douala'), ('Africa/El_Aaiun', 'Africa/El_Aaiun'), ('Africa/Freetown', 'Africa/Freetown'), ('Africa/Gaborone', 'Africa/Gaborone'), ('Africa/Harare', 'Africa/Harare'), ('Africa/Johannesburg', 'Africa/Johannesburg'), ('Africa/Juba', 'Africa/Juba'), ('Africa/Kampala', 'Africa/Kampala'), ('Africa/Khartoum', 'Africa/Khartoum'), ('Africa/Kigali', 'Africa/Kigali'), ('Africa/Kinshasa', 'Africa/Kinshasa'), ('Africa/Lagos', 'Africa/Lagos'), ('Africa/Libreville', 'Africa/Libreville'), ('Africa/Lome', 'Africa/Lome'), ('Africa/Luanda', 'Africa/Luanda'), ('Africa/Lubumbashi', 'Africa/Lubumbashi'), ('Africa/Lusaka', 'Africa/Lusaka'), ('Africa/Malabo', 'Africa/Malabo'), ('Africa/Maputo', 'Africa/Maputo'), ('Africa/Maseru', 'Africa/Maseru'), ('Africa/Mbabane', 'Africa/Mbabane'), ('Africa/Mogadishu', 'Africa/Mogadishu'), ('Africa/Monrovia', 'Africa/Monrovia'), ('Africa/Nairobi', 'Africa/Nairobi'), ('Africa/Ndjamena', 'Africa/Ndjamena'), ('Africa/Niamey', 'Africa/Niamey'), ('Africa/Nouakchott', 'Africa/Nouakchott'), ('Africa/Ouagadougou', 'Africa/Ouagadougou'), ('Africa/Porto-Novo', 'Africa/Porto-Novo'), ('Africa/Sao_Tome', 'Africa/Sao_Tome'), ('Africa/Timbuktu', 'Africa/Timbuktu'), ('Africa/Tripoli', 'Africa/Tripoli'), ('Africa/Tunis', 'Africa/Tunis'), ('Africa/Windhoek', 'Africa/Windhoek'), ('America/Adak', 'America/Adak'), ('America/Anchorage', 'America/Anchorage'), ('America/Anguilla', 'America/Anguilla'), ('America/Antigua', 'America/Antigua'), ('America/Araguaina', 'America/Araguaina'), ('America/Argentina/Buenos_Aires', 'America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', 'America/Argentina/Catamarca'), ('America/Argentina/ComodRivadavia', 'America/Argentina/ComodRivadavia'), ('America/Argentina/Cordoba', 'America/Argentina/Cordoba'), ('America/Argentina/Jujuy', 'America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', 'America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', 'America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', 'America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', 'America/Argentina/Salta'), ('America/Argentina/San_Juan', 'America/Argentina/San_Juan'), ('America/Argentina/San_Luis', 'America/Argentina/San_Luis'), ('America/Argentina/Tucuman', 'America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', 'America/Argentina/Ushuaia'), ('America/Aruba', 'America/Aruba'), ('America/Asuncion', 'America/Asuncion'), ('America/Atikokan', 'America/Atikokan'), ('America/Atka', 'America/Atka'), ('America/Bahia', 'America/Bahia'), ('America/Bahia_Banderas', 'America/Bahia_Banderas'), ('America/Barbados', 'America/Barbados'), ('America/Belem', 'America/Belem'), ('America/Belize', 'America/Belize'), ('America/Blanc-Sablon', 
'America/Blanc-Sablon'), ('America/Boa_Vista', 'America/Boa_Vista'), ('America/Bogota', 'America/Bogota'), ('America/Boise', 'America/Boise'), ('America/Buenos_Aires', 'America/Buenos_Aires'), ('America/Cambridge_Bay', 'America/Cambridge_Bay'), ('America/Campo_Grande', 'America/Campo_Grande'), ('America/Cancun', 'America/Cancun'), ('America/Caracas', 'America/Caracas'), ('America/Catamarca', 'America/Catamarca'), ('America/Cayenne', 'America/Cayenne'), ('America/Cayman', 'America/Cayman'), ('America/Chicago', 'America/Chicago'), ('America/Chihuahua', 'America/Chihuahua'), ('America/Ciudad_Juarez', 'America/Ciudad_Juarez'), ('America/Coral_Harbour', 'America/Coral_Harbour'), ('America/Cordoba', 'America/Cordoba'), ('America/Costa_Rica', 'America/Costa_Rica'), ('America/Creston', 'America/Creston'), ('America/Cuiaba', 'America/Cuiaba'), ('America/Curacao', 'America/Curacao'), ('America/Danmarkshavn', 'America/Danmarkshavn'), ('America/Dawson', 'America/Dawson'), ('America/Dawson_Creek', 'America/Dawson_Creek'), ('America/Denver', 'America/Denver'), ('America/Detroit', 'America/Detroit'), ('America/Dominica', 'America/Dominica'), ('America/Edmonton', 'America/Edmonton'), ('America/Eirunepe', 'America/Eirunepe'), ('America/El_Salvador', 'America/El_Salvador'), ('America/Ensenada', 'America/Ensenada'), ('America/Fort_Nelson', 'America/Fort_Nelson'), ('America/Fort_Wayne', 'America/Fort_Wayne'), ('America/Fortaleza', 'America/Fortaleza'), ('America/Glace_Bay', 'America/Glace_Bay'), ('America/Godthab', 'America/Godthab'), ('America/Goose_Bay', 'America/Goose_Bay'), ('America/Grand_Turk', 'America/Grand_Turk'), ('America/Grenada', 'America/Grenada'), ('America/Guadeloupe', 'America/Guadeloupe'), ('America/Guatemala', 'America/Guatemala'), ('America/Guayaquil', 'America/Guayaquil'), ('America/Guyana', 'America/Guyana'), ('America/Halifax', 'America/Halifax'), ('America/Havana', 'America/Havana'), ('America/Hermosillo', 'America/Hermosillo'), ('America/Indiana/Indianapolis', 'America/Indiana/Indianapolis'), ('America/Indiana/Knox', 'America/Indiana/Knox'), ('America/Indiana/Marengo', 'America/Indiana/Marengo'), ('America/Indiana/Petersburg', 'America/Indiana/Petersburg'), ('America/Indiana/Tell_City', 'America/Indiana/Tell_City'), ('America/Indiana/Vevay', 'America/Indiana/Vevay'), ('America/Indiana/Vincennes', 'America/Indiana/Vincennes'), ('America/Indiana/Winamac', 'America/Indiana/Winamac'), ('America/Indianapolis', 'America/Indianapolis'), ('America/Inuvik', 'America/Inuvik'), ('America/Iqaluit', 'America/Iqaluit'), ('America/Jamaica', 'America/Jamaica'), ('America/Jujuy', 'America/Jujuy'), ('America/Juneau', 'America/Juneau'), ('America/Kentucky/Louisville', 'America/Kentucky/Louisville'), ('America/Kentucky/Monticello', 'America/Kentucky/Monticello'), ('America/Knox_IN', 'America/Knox_IN'), ('America/Kralendijk', 'America/Kralendijk'), ('America/La_Paz', 'America/La_Paz'), ('America/Lima', 'America/Lima'), ('America/Los_Angeles', 'America/Los_Angeles'), ('America/Louisville', 'America/Louisville'), ('America/Lower_Princes', 'America/Lower_Princes'), ('America/Maceio', 'America/Maceio'), ('America/Managua', 'America/Managua'), ('America/Manaus', 'America/Manaus'), ('America/Marigot', 'America/Marigot'), ('America/Martinique', 'America/Martinique'), ('America/Matamoros', 'America/Matamoros'), ('America/Mazatlan', 'America/Mazatlan'), ('America/Mendoza', 'America/Mendoza'), ('America/Menominee', 'America/Menominee'), ('America/Merida', 'America/Merida'), ('America/Metlakatla', 
'America/Metlakatla'), ('America/Mexico_City', 'America/Mexico_City'), ('America/Miquelon', 'America/Miquelon'), ('America/Moncton', 'America/Moncton'), ('America/Monterrey', 'America/Monterrey'), ('America/Montevideo', 'America/Montevideo'), ('America/Montreal', 'America/Montreal'), ('America/Montserrat', 'America/Montserrat'), ('America/Nassau', 'America/Nassau'), ('America/New_York', 'America/New_York'), ('America/Nipigon', 'America/Nipigon'), ('America/Nome', 'America/Nome'), ('America/Noronha', 'America/Noronha'), ('America/North_Dakota/Beulah', 'America/North_Dakota/Beulah'), ('America/North_Dakota/Center', 'America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', 'America/North_Dakota/New_Salem'), ('America/Nuuk', 'America/Nuuk'), ('America/Ojinaga', 'America/Ojinaga'), ('America/Panama', 'America/Panama'), ('America/Pangnirtung', 'America/Pangnirtung'), ('America/Paramaribo', 'America/Paramaribo'), ('America/Phoenix', 'America/Phoenix'), ('America/Port-au-Prince', 'America/Port-au-Prince'), ('America/Port_of_Spain', 'America/Port_of_Spain'), ('America/Porto_Acre', 'America/Porto_Acre'), ('America/Porto_Velho', 'America/Porto_Velho'), ('America/Puerto_Rico', 'America/Puerto_Rico'), ('America/Punta_Arenas', 'America/Punta_Arenas'), ('America/Rainy_River', 'America/Rainy_River'), ('America/Rankin_Inlet', 'America/Rankin_Inlet'), ('America/Recife', 'America/Recife'), ('America/Regina', 'America/Regina'), ('America/Resolute', 'America/Resolute'), ('America/Rio_Branco', 'America/Rio_Branco'), ('America/Rosario', 'America/Rosario'), ('America/Santa_Isabel', 'America/Santa_Isabel'), ('America/Santarem', 'America/Santarem'), ('America/Santiago', 'America/Santiago'), ('America/Santo_Domingo', 'America/Santo_Domingo'), ('America/Sao_Paulo', 'America/Sao_Paulo'), ('America/Scoresbysund', 'America/Scoresbysund'), ('America/Shiprock', 'America/Shiprock'), ('America/Sitka', 'America/Sitka'), ('America/St_Barthelemy', 'America/St_Barthelemy'), ('America/St_Johns', 'America/St_Johns'), ('America/St_Kitts', 'America/St_Kitts'), ('America/St_Lucia', 'America/St_Lucia'), ('America/St_Thomas', 'America/St_Thomas'), ('America/St_Vincent', 'America/St_Vincent'), ('America/Swift_Current', 'America/Swift_Current'), ('America/Tegucigalpa', 'America/Tegucigalpa'), ('America/Thule', 'America/Thule'), ('America/Thunder_Bay', 'America/Thunder_Bay'), ('America/Tijuana', 'America/Tijuana'), ('America/Toronto', 'America/Toronto'), ('America/Tortola', 'America/Tortola'), ('America/Vancouver', 'America/Vancouver'), ('America/Virgin', 'America/Virgin'), ('America/Whitehorse', 'America/Whitehorse'), ('America/Winnipeg', 'America/Winnipeg'), ('America/Yakutat', 'America/Yakutat'), ('America/Yellowknife', 'America/Yellowknife'), ('Antarctica/Casey', 'Antarctica/Casey'), ('Antarctica/Davis', 'Antarctica/Davis'), ('Antarctica/DumontDUrville', 'Antarctica/DumontDUrville'), ('Antarctica/Macquarie', 'Antarctica/Macquarie'), ('Antarctica/Mawson', 'Antarctica/Mawson'), ('Antarctica/McMurdo', 'Antarctica/McMurdo'), ('Antarctica/Palmer', 'Antarctica/Palmer'), ('Antarctica/Rothera', 'Antarctica/Rothera'), ('Antarctica/South_Pole', 'Antarctica/South_Pole'), ('Antarctica/Syowa', 'Antarctica/Syowa'), ('Antarctica/Troll', 'Antarctica/Troll'), ('Antarctica/Vostok', 'Antarctica/Vostok'), ('Arctic/Longyearbyen', 'Arctic/Longyearbyen'), ('Asia/Aden', 'Asia/Aden'), ('Asia/Almaty', 'Asia/Almaty'), ('Asia/Amman', 'Asia/Amman'), ('Asia/Anadyr', 'Asia/Anadyr'), ('Asia/Aqtau', 'Asia/Aqtau'), ('Asia/Aqtobe', 'Asia/Aqtobe'), 
('Asia/Ashgabat', 'Asia/Ashgabat'), ('Asia/Ashkhabad', 'Asia/Ashkhabad'), ('Asia/Atyrau', 'Asia/Atyrau'), ('Asia/Baghdad', 'Asia/Baghdad'), ('Asia/Bahrain', 'Asia/Bahrain'), ('Asia/Baku', 'Asia/Baku'), ('Asia/Bangkok', 'Asia/Bangkok'), ('Asia/Barnaul', 'Asia/Barnaul'), ('Asia/Beirut', 'Asia/Beirut'), ('Asia/Bishkek', 'Asia/Bishkek'), ('Asia/Brunei', 'Asia/Brunei'), ('Asia/Calcutta', 'Asia/Calcutta'), ('Asia/Chita', 'Asia/Chita'), ('Asia/Choibalsan', 'Asia/Choibalsan'), ('Asia/Chongqing', 'Asia/Chongqing'), ('Asia/Chungking', 'Asia/Chungking'), ('Asia/Colombo', 'Asia/Colombo'), ('Asia/Dacca', 'Asia/Dacca'), ('Asia/Damascus', 'Asia/Damascus'), ('Asia/Dhaka', 'Asia/Dhaka'), ('Asia/Dili', 'Asia/Dili'), ('Asia/Dubai', 'Asia/Dubai'), ('Asia/Dushanbe', 'Asia/Dushanbe'), ('Asia/Famagusta', 'Asia/Famagusta'), ('Asia/Gaza', 'Asia/Gaza'), ('Asia/Harbin', 'Asia/Harbin'), ('Asia/Hebron', 'Asia/Hebron'), ('Asia/Ho_Chi_Minh', 'Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', 'Asia/Hong_Kong'), ('Asia/Hovd', 'Asia/Hovd'), ('Asia/Irkutsk', 'Asia/Irkutsk'), ('Asia/Istanbul', 'Asia/Istanbul'), ('Asia/Jakarta', 'Asia/Jakarta'), ('Asia/Jayapura', 'Asia/Jayapura'), ('Asia/Jerusalem', 'Asia/Jerusalem'), ('Asia/Kabul', 'Asia/Kabul'), ('Asia/Kamchatka', 'Asia/Kamchatka'), ('Asia/Karachi', 'Asia/Karachi'), ('Asia/Kashgar', 'Asia/Kashgar'), ('Asia/Kathmandu', 'Asia/Kathmandu'), ('Asia/Katmandu', 'Asia/Katmandu'), ('Asia/Khandyga', 'Asia/Khandyga'), ('Asia/Kolkata', 'Asia/Kolkata'), ('Asia/Krasnoyarsk', 'Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', 'Asia/Kuala_Lumpur'), ('Asia/Kuching', 'Asia/Kuching'), ('Asia/Kuwait', 'Asia/Kuwait'), ('Asia/Macao', 'Asia/Macao'), ('Asia/Macau', 'Asia/Macau'), ('Asia/Magadan', 'Asia/Magadan'), ('Asia/Makassar', 'Asia/Makassar'), ('Asia/Manila', 'Asia/Manila'), ('Asia/Muscat', 'Asia/Muscat'), ('Asia/Nicosia', 'Asia/Nicosia'), ('Asia/Novokuznetsk', 'Asia/Novokuznetsk'), ('Asia/Novosibirsk', 'Asia/Novosibirsk'), ('Asia/Omsk', 'Asia/Omsk'), ('Asia/Oral', 'Asia/Oral'), ('Asia/Phnom_Penh', 'Asia/Phnom_Penh'), ('Asia/Pontianak', 'Asia/Pontianak'), ('Asia/Pyongyang', 'Asia/Pyongyang'), ('Asia/Qatar', 'Asia/Qatar'), ('Asia/Qostanay', 'Asia/Qostanay'), ('Asia/Qyzylorda', 'Asia/Qyzylorda'), ('Asia/Rangoon', 'Asia/Rangoon'), ('Asia/Riyadh', 'Asia/Riyadh'), ('Asia/Saigon', 'Asia/Saigon'), ('Asia/Sakhalin', 'Asia/Sakhalin'), ('Asia/Samarkand', 'Asia/Samarkand'), ('Asia/Seoul', 'Asia/Seoul'), ('Asia/Shanghai', 'Asia/Shanghai'), ('Asia/Singapore', 'Asia/Singapore'), ('Asia/Srednekolymsk', 'Asia/Srednekolymsk'), ('Asia/Taipei', 'Asia/Taipei'), ('Asia/Tashkent', 'Asia/Tashkent'), ('Asia/Tbilisi', 'Asia/Tbilisi'), ('Asia/Tehran', 'Asia/Tehran'), ('Asia/Tel_Aviv', 'Asia/Tel_Aviv'), ('Asia/Thimbu', 'Asia/Thimbu'), ('Asia/Thimphu', 'Asia/Thimphu'), ('Asia/Tokyo', 'Asia/Tokyo'), ('Asia/Tomsk', 'Asia/Tomsk'), ('Asia/Ujung_Pandang', 'Asia/Ujung_Pandang'), ('Asia/Ulaanbaatar', 'Asia/Ulaanbaatar'), ('Asia/Ulan_Bator', 'Asia/Ulan_Bator'), ('Asia/Urumqi', 'Asia/Urumqi'), ('Asia/Ust-Nera', 'Asia/Ust-Nera'), ('Asia/Vientiane', 'Asia/Vientiane'), ('Asia/Vladivostok', 'Asia/Vladivostok'), ('Asia/Yakutsk', 'Asia/Yakutsk'), ('Asia/Yangon', 'Asia/Yangon'), ('Asia/Yekaterinburg', 'Asia/Yekaterinburg'), ('Asia/Yerevan', 'Asia/Yerevan'), ('Atlantic/Azores', 'Atlantic/Azores'), ('Atlantic/Bermuda', 'Atlantic/Bermuda'), ('Atlantic/Canary', 'Atlantic/Canary'), ('Atlantic/Cape_Verde', 'Atlantic/Cape_Verde'), ('Atlantic/Faeroe', 'Atlantic/Faeroe'), ('Atlantic/Faroe', 'Atlantic/Faroe'), ('Atlantic/Jan_Mayen', 'Atlantic/Jan_Mayen'), 
('Atlantic/Madeira', 'Atlantic/Madeira'), ('Atlantic/Reykjavik', 'Atlantic/Reykjavik'), ('Atlantic/South_Georgia', 'Atlantic/South_Georgia'), ('Atlantic/St_Helena', 'Atlantic/St_Helena'), ('Atlantic/Stanley', 'Atlantic/Stanley'), ('Australia/ACT', 'Australia/ACT'), ('Australia/Adelaide', 'Australia/Adelaide'), ('Australia/Brisbane', 'Australia/Brisbane'), ('Australia/Broken_Hill', 'Australia/Broken_Hill'), ('Australia/Canberra', 'Australia/Canberra'), ('Australia/Currie', 'Australia/Currie'), ('Australia/Darwin', 'Australia/Darwin'), ('Australia/Eucla', 'Australia/Eucla'), ('Australia/Hobart', 'Australia/Hobart'), ('Australia/LHI', 'Australia/LHI'), ('Australia/Lindeman', 'Australia/Lindeman'), ('Australia/Lord_Howe', 'Australia/Lord_Howe'), ('Australia/Melbourne', 'Australia/Melbourne'), ('Australia/NSW', 'Australia/NSW'), ('Australia/North', 'Australia/North'), ('Australia/Perth', 'Australia/Perth'), ('Australia/Queensland', 'Australia/Queensland'), ('Australia/South', 'Australia/South'), ('Australia/Sydney', 'Australia/Sydney'), ('Australia/Tasmania', 'Australia/Tasmania'), ('Australia/Victoria', 'Australia/Victoria'), ('Australia/West', 'Australia/West'), ('Australia/Yancowinna', 'Australia/Yancowinna'), ('Brazil/Acre', 'Brazil/Acre'), ('Brazil/DeNoronha', 'Brazil/DeNoronha'), ('Brazil/East', 'Brazil/East'), ('Brazil/West', 'Brazil/West'), ('CET', 'CET'), ('CST6CDT', 'CST6CDT'), ('Canada/Atlantic', 'Canada/Atlantic'), ('Canada/Central', 'Canada/Central'), ('Canada/Eastern', 'Canada/Eastern'), ('Canada/Mountain', 'Canada/Mountain'), ('Canada/Newfoundland', 'Canada/Newfoundland'), ('Canada/Pacific', 'Canada/Pacific'), ('Canada/Saskatchewan', 'Canada/Saskatchewan'), ('Canada/Yukon', 'Canada/Yukon'), ('Chile/Continental', 'Chile/Continental'), ('Chile/EasterIsland', 'Chile/EasterIsland'), ('Cuba', 'Cuba'), ('EET', 'EET'), ('EST', 'EST'), ('EST5EDT', 'EST5EDT'), ('Egypt', 'Egypt'), ('Eire', 'Eire'), ('Etc/GMT', 'Etc/GMT'), ('Etc/GMT+0', 'Etc/GMT+0'), ('Etc/GMT+1', 'Etc/GMT+1'), ('Etc/GMT+10', 'Etc/GMT+10'), ('Etc/GMT+11', 'Etc/GMT+11'), ('Etc/GMT+12', 'Etc/GMT+12'), ('Etc/GMT+2', 'Etc/GMT+2'), ('Etc/GMT+3', 'Etc/GMT+3'), ('Etc/GMT+4', 'Etc/GMT+4'), ('Etc/GMT+5', 'Etc/GMT+5'), ('Etc/GMT+6', 'Etc/GMT+6'), ('Etc/GMT+7', 'Etc/GMT+7'), ('Etc/GMT+8', 'Etc/GMT+8'), ('Etc/GMT+9', 'Etc/GMT+9'), ('Etc/GMT-0', 'Etc/GMT-0'), ('Etc/GMT-1', 'Etc/GMT-1'), ('Etc/GMT-10', 'Etc/GMT-10'), ('Etc/GMT-11', 'Etc/GMT-11'), ('Etc/GMT-12', 'Etc/GMT-12'), ('Etc/GMT-13', 'Etc/GMT-13'), ('Etc/GMT-14', 'Etc/GMT-14'), ('Etc/GMT-2', 'Etc/GMT-2'), ('Etc/GMT-3', 'Etc/GMT-3'), ('Etc/GMT-4', 'Etc/GMT-4'), ('Etc/GMT-5', 'Etc/GMT-5'), ('Etc/GMT-6', 'Etc/GMT-6'), ('Etc/GMT-7', 'Etc/GMT-7'), ('Etc/GMT-8', 'Etc/GMT-8'), ('Etc/GMT-9', 'Etc/GMT-9'), ('Etc/GMT0', 'Etc/GMT0'), ('Etc/Greenwich', 'Etc/Greenwich'), ('Etc/UCT', 'Etc/UCT'), ('Etc/UTC', 'Etc/UTC'), ('Etc/Universal', 'Etc/Universal'), ('Etc/Zulu', 'Etc/Zulu'), ('Europe/Amsterdam', 'Europe/Amsterdam'), ('Europe/Andorra', 'Europe/Andorra'), ('Europe/Astrakhan', 'Europe/Astrakhan'), ('Europe/Athens', 'Europe/Athens'), ('Europe/Belfast', 'Europe/Belfast'), ('Europe/Belgrade', 'Europe/Belgrade'), ('Europe/Berlin', 'Europe/Berlin'), ('Europe/Bratislava', 'Europe/Bratislava'), ('Europe/Brussels', 'Europe/Brussels'), ('Europe/Bucharest', 'Europe/Bucharest'), ('Europe/Budapest', 'Europe/Budapest'), ('Europe/Busingen', 'Europe/Busingen'), ('Europe/Chisinau', 'Europe/Chisinau'), ('Europe/Copenhagen', 'Europe/Copenhagen'), ('Europe/Dublin', 'Europe/Dublin'), ('Europe/Gibraltar', 
'Europe/Gibraltar'), ('Europe/Guernsey', 'Europe/Guernsey'), ('Europe/Helsinki', 'Europe/Helsinki'), ('Europe/Isle_of_Man', 'Europe/Isle_of_Man'), ('Europe/Istanbul', 'Europe/Istanbul'), ('Europe/Jersey', 'Europe/Jersey'), ('Europe/Kaliningrad', 'Europe/Kaliningrad'), ('Europe/Kiev', 'Europe/Kiev'), ('Europe/Kirov', 'Europe/Kirov'), ('Europe/Kyiv', 'Europe/Kyiv'), ('Europe/Lisbon', 'Europe/Lisbon'), ('Europe/Ljubljana', 'Europe/Ljubljana'), ('Europe/London', 'Europe/London'), ('Europe/Luxembourg', 'Europe/Luxembourg'), ('Europe/Madrid', 'Europe/Madrid'), ('Europe/Malta', 'Europe/Malta'), ('Europe/Mariehamn', 'Europe/Mariehamn'), ('Europe/Minsk', 'Europe/Minsk'), ('Europe/Monaco', 'Europe/Monaco'), ('Europe/Moscow', 'Europe/Moscow'), ('Europe/Nicosia', 'Europe/Nicosia'), ('Europe/Oslo', 'Europe/Oslo'), ('Europe/Paris', 'Europe/Paris'), ('Europe/Podgorica', 'Europe/Podgorica'), ('Europe/Prague', 'Europe/Prague'), ('Europe/Riga', 'Europe/Riga'), ('Europe/Rome', 'Europe/Rome'), ('Europe/Samara', 'Europe/Samara'), ('Europe/San_Marino', 'Europe/San_Marino'), ('Europe/Sarajevo', 'Europe/Sarajevo'), ('Europe/Saratov', 'Europe/Saratov'), ('Europe/Simferopol', 'Europe/Simferopol'), ('Europe/Skopje', 'Europe/Skopje'), ('Europe/Sofia', 'Europe/Sofia'), ('Europe/Stockholm', 'Europe/Stockholm'), ('Europe/Tallinn', 'Europe/Tallinn'), ('Europe/Tirane', 'Europe/Tirane'), ('Europe/Tiraspol', 'Europe/Tiraspol'), ('Europe/Ulyanovsk', 'Europe/Ulyanovsk'), ('Europe/Uzhgorod', 'Europe/Uzhgorod'), ('Europe/Vaduz', 'Europe/Vaduz'), ('Europe/Vatican', 'Europe/Vatican'), ('Europe/Vienna', 'Europe/Vienna'), ('Europe/Vilnius', 'Europe/Vilnius'), ('Europe/Volgograd', 'Europe/Volgograd'), ('Europe/Warsaw', 'Europe/Warsaw'), ('Europe/Zagreb', 'Europe/Zagreb'), ('Europe/Zaporozhye', 'Europe/Zaporozhye'), ('Europe/Zurich', 'Europe/Zurich'), ('GB', 'GB'), ('GB-Eire', 'GB-Eire'), ('GMT', 'GMT'), ('GMT+0', 'GMT+0'), ('GMT-0', 'GMT-0'), ('GMT0', 'GMT0'), ('Greenwich', 'Greenwich'), ('HST', 'HST'), ('Hongkong', 'Hongkong'), ('Iceland', 'Iceland'), ('Indian/Antananarivo', 'Indian/Antananarivo'), ('Indian/Chagos', 'Indian/Chagos'), ('Indian/Christmas', 'Indian/Christmas'), ('Indian/Cocos', 'Indian/Cocos'), ('Indian/Comoro', 'Indian/Comoro'), ('Indian/Kerguelen', 'Indian/Kerguelen'), ('Indian/Mahe', 'Indian/Mahe'), ('Indian/Maldives', 'Indian/Maldives'), ('Indian/Mauritius', 'Indian/Mauritius'), ('Indian/Mayotte', 'Indian/Mayotte'), ('Indian/Reunion', 'Indian/Reunion'), ('Iran', 'Iran'), ('Israel', 'Israel'), ('Jamaica', 'Jamaica'), ('Japan', 'Japan'), ('Kwajalein', 'Kwajalein'), ('Libya', 'Libya'), ('MET', 'MET'), ('MST', 'MST'), ('MST7MDT', 'MST7MDT'), ('Mexico/BajaNorte', 'Mexico/BajaNorte'), ('Mexico/BajaSur', 'Mexico/BajaSur'), ('Mexico/General', 'Mexico/General'), ('NZ', 'NZ'), ('NZ-CHAT', 'NZ-CHAT'), ('Navajo', 'Navajo'), ('PRC', 'PRC'), ('PST8PDT', 'PST8PDT'), ('Pacific/Apia', 'Pacific/Apia'), ('Pacific/Auckland', 'Pacific/Auckland'), ('Pacific/Bougainville', 'Pacific/Bougainville'), ('Pacific/Chatham', 'Pacific/Chatham'), ('Pacific/Chuuk', 'Pacific/Chuuk'), ('Pacific/Easter', 'Pacific/Easter'), ('Pacific/Efate', 'Pacific/Efate'), ('Pacific/Enderbury', 'Pacific/Enderbury'), ('Pacific/Fakaofo', 'Pacific/Fakaofo'), ('Pacific/Fiji', 'Pacific/Fiji'), ('Pacific/Funafuti', 'Pacific/Funafuti'), ('Pacific/Galapagos', 'Pacific/Galapagos'), ('Pacific/Gambier', 'Pacific/Gambier'), ('Pacific/Guadalcanal', 'Pacific/Guadalcanal'), ('Pacific/Guam', 'Pacific/Guam'), ('Pacific/Honolulu', 'Pacific/Honolulu'), ('Pacific/Johnston', 
'Pacific/Johnston'), ('Pacific/Kanton', 'Pacific/Kanton'), ('Pacific/Kiritimati', 'Pacific/Kiritimati'), ('Pacific/Kosrae', 'Pacific/Kosrae'), ('Pacific/Kwajalein', 'Pacific/Kwajalein'), ('Pacific/Majuro', 'Pacific/Majuro'), ('Pacific/Marquesas', 'Pacific/Marquesas'), ('Pacific/Midway', 'Pacific/Midway'), ('Pacific/Nauru', 'Pacific/Nauru'), ('Pacific/Niue', 'Pacific/Niue'), ('Pacific/Norfolk', 'Pacific/Norfolk'), ('Pacific/Noumea', 'Pacific/Noumea'), ('Pacific/Pago_Pago', 'Pacific/Pago_Pago'), ('Pacific/Palau', 'Pacific/Palau'), ('Pacific/Pitcairn', 'Pacific/Pitcairn'), ('Pacific/Pohnpei', 'Pacific/Pohnpei'), ('Pacific/Ponape', 'Pacific/Ponape'), ('Pacific/Port_Moresby', 'Pacific/Port_Moresby'), ('Pacific/Rarotonga', 'Pacific/Rarotonga'), ('Pacific/Saipan', 'Pacific/Saipan'), ('Pacific/Samoa', 'Pacific/Samoa'), ('Pacific/Tahiti', 'Pacific/Tahiti'), ('Pacific/Tarawa', 'Pacific/Tarawa'), ('Pacific/Tongatapu', 'Pacific/Tongatapu'), ('Pacific/Truk', 'Pacific/Truk'), ('Pacific/Wake', 'Pacific/Wake'), ('Pacific/Wallis', 'Pacific/Wallis'), ('Pacific/Yap', 'Pacific/Yap'), ('Poland', 'Poland'), ('Portugal', 'Portugal'), ('ROC', 'ROC'), ('ROK', 'ROK'), ('Singapore', 'Singapore'), ('Turkey', 'Turkey'), ('UCT', 'UCT'), ('US/Alaska', 'US/Alaska'), ('US/Aleutian', 'US/Aleutian'), ('US/Arizona', 'US/Arizona'), ('US/Central', 'US/Central'), ('US/East-Indiana', 'US/East-Indiana'), ('US/Eastern', 'US/Eastern'), ('US/Hawaii', 'US/Hawaii'), ('US/Indiana-Starke', 'US/Indiana-Starke'), ('US/Michigan', 'US/Michigan'), ('US/Mountain', 'US/Mountain'), ('US/Pacific', 'US/Pacific'), ('US/Samoa', 'US/Samoa'), ('UTC', 'UTC'), ('Universal', 'Universal'), ('W-SU', 'W-SU'), ('WET', 'WET'), ('Zulu', 'Zulu')], default='UTC', max_length=50),
+ ),
+ ]
diff --git a/dojo/db_migrations/0178_alter_answer_polymorphic_ctype_and_more.py b/dojo/db_migrations/0178_alter_answer_polymorphic_ctype_and_more.py
new file mode 100644
index 00000000000..fbb9ca58b8e
--- /dev/null
+++ b/dojo/db_migrations/0178_alter_answer_polymorphic_ctype_and_more.py
@@ -0,0 +1,30 @@
+# Generated by Django 4.1.5 on 2023-01-09 21:09
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('contenttypes', '0002_remove_content_type_name'),
+ ('dojo', '0177_alter_system_settings_time_zone'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='answer',
+ name='polymorphic_ctype',
+ field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype'),
+ ),
+ migrations.AlterField(
+ model_name='question',
+ name='polymorphic_ctype',
+ field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype'),
+ ),
+ migrations.AlterField(
+ model_name='rule',
+ name='child_rules',
+ field=models.ManyToManyField(editable=False, to='dojo.rule'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0179_alter_finding_verified.py b/dojo/db_migrations/0179_alter_finding_verified.py
new file mode 100644
index 00000000000..cbd308373c3
--- /dev/null
+++ b/dojo/db_migrations/0179_alter_finding_verified.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.5 on 2023-01-20 18:45
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0178_alter_answer_polymorphic_ctype_and_more'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='finding',
+ name='verified',
+ field=models.BooleanField(default=False, help_text='Denotes if this flaw has been manually verified by the tester.', verbose_name='Verified'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0180_announcement_userannouncement.py b/dojo/db_migrations/0180_announcement_userannouncement.py
new file mode 100644
index 00000000000..858356b13b3
--- /dev/null
+++ b/dojo/db_migrations/0180_announcement_userannouncement.py
@@ -0,0 +1,31 @@
+# Generated by Django 4.1.5 on 2023-01-30 07:38
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0179_alter_finding_verified'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Announcement',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('message', models.CharField(default='', help_text="This dismissable message will be displayed on all pages for authenticated users. It can contain basic html tags, for example <a href='https://example.com' target='_blank'>https://example.com</a>", max_length=500)),
+ ('dismissable', models.BooleanField(blank=True, default=False, null=True)),
+ ('style', models.CharField(choices=[('info', 'Info'), ('success', 'Success'), ('warning', 'Warning'), ('danger', 'Danger')], default='info', help_text='The style of banner to display. (info, success, warning, danger)', max_length=64)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='UserAnnouncement',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('announcement', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_announcement', to='dojo.announcement')),
+ ('user', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user')),
+ ],
+ ),
+ ]
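A minimal usage sketch for the two models created above, assuming both are exposed via dojo.models (the banner text and username are hypothetical):

    from dojo.models import Announcement, Dojo_User, UserAnnouncement

    # Fetch or create the single site-wide banner row.
    announcement, _ = Announcement.objects.get_or_create(
        defaults={
            "message": "Maintenance window on Saturday",  # hypothetical text
            "dismissable": True,
            "style": "warning",
        },
    )

    # Record a per-user dismissal; the related_name 'user_announcement'
    # defined above makes announcement.user_announcement.all() list them.
    user = Dojo_User.objects.get(username="admin")  # hypothetical user
    UserAnnouncement.objects.get_or_create(announcement=announcement, user=user)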
diff --git a/dojo/db_migrations/0181_jira_instance_finding_jira_sync.py b/dojo/db_migrations/0181_jira_instance_finding_jira_sync.py
new file mode 100644
index 00000000000..16e43ceaeea
--- /dev/null
+++ b/dojo/db_migrations/0181_jira_instance_finding_jira_sync.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.7 on 2023-03-02 12:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0180_announcement_userannouncement'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='jira_instance',
+ name='finding_jira_sync',
+ field=models.BooleanField(default=False, help_text='If enabled, this will sync changes to a Finding automatically to JIRA', verbose_name='Automatically sync Findings with JIRA?'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0182_alter_jira_instance_default_issue_type.py b/dojo/db_migrations/0182_alter_jira_instance_default_issue_type.py
new file mode 100644
index 00000000000..7260701ac89
--- /dev/null
+++ b/dojo/db_migrations/0182_alter_jira_instance_default_issue_type.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.7 on 2023-03-06 11:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0181_jira_instance_finding_jira_sync'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='jira_instance',
+ name='default_issue_type',
+ field=models.CharField(choices=[('Task', 'Task'), ('Story', 'Story'), ('Epic', 'Epic'), ('Spike', 'Spike'), ('Bug', 'Bug'), ('Security', 'Security')], default='Bug', help_text='You can define extra issue types in settings.py', max_length=255),
+ ),
+ ]
diff --git a/dojo/db_migrations/0183_system_settings_enable_notify_sla_exponential_backoff_and_more.py b/dojo/db_migrations/0183_system_settings_enable_notify_sla_exponential_backoff_and_more.py
new file mode 100644
index 00000000000..4e1a33e2bd9
--- /dev/null
+++ b/dojo/db_migrations/0183_system_settings_enable_notify_sla_exponential_backoff_and_more.py
@@ -0,0 +1,33 @@
+# Generated by Django 4.1.7 on 2023-03-20 12:22
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0182_alter_jira_instance_default_issue_type'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_notify_sla_exponential_backoff',
+ field=models.BooleanField(default=False, help_text='Enable an exponential backoff strategy for SLA breach notifications, e.g. 1, 2, 4, 8, etc. Otherwise it alerts every day', verbose_name='Enable an exponential backoff strategy for SLA breach notifications.'),
+ ),
+ migrations.AlterField(
+ model_name='system_settings',
+ name='enable_notify_sla_active',
+ field=models.BooleanField(default=False, help_text="Enables Notify when time to remediate according to Finding SLA's is breached for active Findings.", verbose_name="Enable Notify SLA's Breach for active Findings"),
+ ),
+ migrations.AlterField(
+ model_name='system_settings',
+ name='enable_notify_sla_active_verified',
+ field=models.BooleanField(default=False, help_text="Enables Notify when time to remediate according to Finding SLA's is breached for active, verified Findings.", verbose_name="Enable Notify SLA's Breach for active, verified Findings"),
+ ),
+ migrations.AlterField(
+ model_name='system_settings',
+ name='enable_notify_sla_jira_only',
+ field=models.BooleanField(default=False, help_text="Enables Notify when time to remediate according to Finding SLA's is breached for Findings that are linked to JIRA issues.", verbose_name="Enable Notify SLA's Breach for Findings linked to JIRA"),
+ ),
+ ]
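The backoff help text above ("1, 2, 4, 8, etc.") amounts to notifying only on days since breach that are powers of two. A hedged sketch of that check (the helper name and call site are assumptions, not part of this diff):

    def should_notify(days_since_breach: int, exponential_backoff: bool) -> bool:
        # No breach yet, nothing to notify about.
        if days_since_breach < 1:
            return False
        # Daily alerts when backoff is disabled.
        if not exponential_backoff:
            return True
        # 1, 2, 4, 8, ...: powers of two have exactly one bit set.
        return days_since_breach & (days_since_breach - 1) == 0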
diff --git a/dojo/db_migrations/0184_remove_child_rule_parent_rule_delete_fieldrule_and_more.py b/dojo/db_migrations/0184_remove_child_rule_parent_rule_delete_fieldrule_and_more.py
new file mode 100644
index 00000000000..57c3d651227
--- /dev/null
+++ b/dojo/db_migrations/0184_remove_child_rule_parent_rule_delete_fieldrule_and_more.py
@@ -0,0 +1,54 @@
+# Generated by Django 4.1.7 on 2023-03-27 15:16
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0183_system_settings_enable_notify_sla_exponential_backoff_and_more'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='child_rule',
+ name='parent_rule',
+ ),
+ migrations.DeleteModel(
+ name='FieldRule',
+ ),
+ migrations.RemoveField(
+ model_name='rule',
+ name='child_rules',
+ ),
+ migrations.RemoveField(
+ model_name='rule',
+ name='parent_rule',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='column_widths',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='drive_folder_ID',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='email_address',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='enable_google_sheets',
+ ),
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='enable_rules_framework',
+ ),
+ migrations.DeleteModel(
+ name='Child_Rule',
+ ),
+ migrations.DeleteModel(
+ name='Rule',
+ ),
+ ]
diff --git a/dojo/db_migrations/0185_product_disable_sla_breach_notifications_and_more.py b/dojo/db_migrations/0185_product_disable_sla_breach_notifications_and_more.py
new file mode 100644
index 00000000000..0cca3086753
--- /dev/null
+++ b/dojo/db_migrations/0185_product_disable_sla_breach_notifications_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.1.7 on 2023-04-04 03:13
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0184_remove_child_rule_parent_rule_delete_fieldrule_and_more'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='product',
+ name='disable_sla_breach_notifications',
+ field=models.BooleanField(default=False, help_text='Disable SLA breach notifications if configured in the global settings', verbose_name='Disable SLA breach notifications'),
+ ),
+ migrations.AlterField(
+ model_name='system_settings',
+ name='enable_notify_sla_jira_only',
+ field=models.BooleanField(default=False, help_text="Enables Notify when time to remediate according to Finding SLA's is breached for Findings that are linked to JIRA issues. Notification is disabled for Findings not linked to JIRA issues", verbose_name="Enable Notify SLA's Breach only for Findings linked to JIRA"),
+ ),
+ ]
diff --git a/dojo/db_migrations/0186_system_settings_non_common_password_required.py b/dojo/db_migrations/0186_system_settings_non_common_password_required.py
new file mode 100644
index 00000000000..d7df9f56bc9
--- /dev/null
+++ b/dojo/db_migrations/0186_system_settings_non_common_password_required.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.7 on 2023-05-09 17:41
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0185_product_disable_sla_breach_notifications_and_more'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='non_common_password_required',
+ field=models.BooleanField(default=True, help_text='Requires user passwords to not be part of list of common passwords.', verbose_name='Password must not be common'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0187_nessus_to_tenable.py b/dojo/db_migrations/0187_nessus_to_tenable.py
new file mode 100644
index 00000000000..d3c734c15a9
--- /dev/null
+++ b/dojo/db_migrations/0187_nessus_to_tenable.py
@@ -0,0 +1,59 @@
+from django.db import migrations
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+NESSUS_REFERENCES = ['Nessus Scan', 'Nessus WAS Scan']
+
+
+# update the test type object as well as the scan type name
+def update_test(test, tenable_test_type) -> None:
+ if test.test_type.name in NESSUS_REFERENCES or test.scan_type in NESSUS_REFERENCES:
+ test.test_type = tenable_test_type
+ test.scan_type = tenable_test_type.name
+ test.save()
+
+
+# Update the found_by field to remove nessus/WAS and add tenable
+def update_finding(finding, tenable_test_type, nessus_test_type, nessus_was_test_type) -> None:
+ # Check if nessus is in found by list and remove
+ if nessus_test_type in finding.found_by.all():
+ finding.found_by.remove(nessus_test_type.id)
+ # Check if nessus WAS is in found by list and remove
+ if nessus_was_test_type in finding.found_by.all():
+ finding.found_by.remove(nessus_was_test_type.id)
+ # Check if tenable is already in list somehow before adding it
+ if tenable_test_type not in finding.found_by.all():
+ finding.found_by.add(tenable_test_type.id)
+ finding.save()
+
+
+# Update all finding objects that came from nessus/WAS reports
+def migrate_nessus_findings_to_tenable(apps, schema_editor):
+ finding_model = apps.get_model('dojo', 'Finding')
+ test_type_model = apps.get_model('dojo', 'Test_Type')
+ # Get or create Tenable Test Type and fetch the nessus and nessus WAS test types
+ tenable_test_type, _ = test_type_model.objects.get_or_create(name="Tenable Scan", active=True)
+ nessus_test_type = test_type_model.objects.filter(name="Nessus Scan").first()
+ nessus_was_test_type = test_type_model.objects.filter(name="Nessus WAS Scan").first()
+ # Get all the findings found by Nessus and Nessus WAS
+ findings = finding_model.objects.filter(test__scan_type__in=NESSUS_REFERENCES)
+ logger.warning(f'We identified {findings.count()} Nessus/NessusWAS findings to migrate to Tenable findings')
+ # Iterate over all findings and change
+ for finding in findings:
+ # Update the found by field
+ update_finding(finding, tenable_test_type, nessus_test_type, nessus_was_test_type)
+ # Update the test object
+ update_test(finding.test, tenable_test_type)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0186_system_settings_non_common_password_required'),
+ ]
+
+ operations = [
+ migrations.RunPython(migrate_nessus_findings_to_tenable),
+ ]
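Note that the RunPython operation above registers no reverse function, so this data migration cannot be unapplied. If reversibility matters, the usual Django idiom is to pass RunPython.noop as the second argument (a sketch, not part of the diff; restoring the removed Nessus test types is deliberately left undone):

    operations = [
        # noop makes 'migrate dojo 0186' possible without attempting to
        # recreate the old Nessus/Nessus WAS associations.
        migrations.RunPython(migrate_nessus_findings_to_tenable,
                             migrations.RunPython.noop),
    ]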
diff --git a/dojo/db_migrations/0188_product_enable_product_tag_inheritance_and_more.py b/dojo/db_migrations/0188_product_enable_product_tag_inheritance_and_more.py
new file mode 100644
index 00000000000..fc6f20f5fd8
--- /dev/null
+++ b/dojo/db_migrations/0188_product_enable_product_tag_inheritance_and_more.py
@@ -0,0 +1,109 @@
+# Generated by Django 4.1.7 on 2023-05-10 00:36
+
+from django.db import migrations, models
+import tagulous.models.fields
+import tagulous.models.models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0187_nessus_to_tenable'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='product',
+ name='enable_product_tag_inheritance',
+ field=models.BooleanField(default=False, help_text='Enables product tag inheritance. Any tags added on a product will automatically be added to all Engagements, Tests, and Findings', verbose_name='Enable Product Tag Inheritance'),
+ ),
+ migrations.AddField(
+ model_name='system_settings',
+ name='enable_product_tag_inheritance',
+ field=models.BooleanField(default=False, help_text='Enables product tag inheritance globally for all products. Any tags added on a product will automatically be added to all Engagements, Tests, and Findings', verbose_name='Enable Product Tag Inheritance'),
+ ),
+ migrations.CreateModel(
+ name='Tagulous_Test_inherited_tags',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255, unique=True)),
+ ('slug', models.SlugField()),
+ ('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
+ ('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
+ ],
+ options={
+ 'ordering': ('name',),
+ 'abstract': False,
+ 'unique_together': {('slug',)},
+ },
+ bases=(tagulous.models.models.BaseTagModel, models.Model),
+ ),
+ migrations.CreateModel(
+ name='Tagulous_Finding_inherited_tags',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255, unique=True)),
+ ('slug', models.SlugField()),
+ ('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
+ ('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
+ ],
+ options={
+ 'ordering': ('name',),
+ 'abstract': False,
+ 'unique_together': {('slug',)},
+ },
+ bases=(tagulous.models.models.BaseTagModel, models.Model),
+ ),
+ migrations.CreateModel(
+ name='Tagulous_Engagement_inherited_tags',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255, unique=True)),
+ ('slug', models.SlugField()),
+ ('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
+ ('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
+ ],
+ options={
+ 'ordering': ('name',),
+ 'abstract': False,
+ 'unique_together': {('slug',)},
+ },
+ bases=(tagulous.models.models.BaseTagModel, models.Model),
+ ),
+ migrations.CreateModel(
+ name='Tagulous_Endpoint_inherited_tags',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255, unique=True)),
+ ('slug', models.SlugField()),
+ ('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
+ ('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
+ ],
+ options={
+ 'ordering': ('name',),
+ 'abstract': False,
+ 'unique_together': {('slug',)},
+ },
+ bases=(tagulous.models.models.BaseTagModel, models.Model),
+ ),
+ migrations.AddField(
+ model_name='endpoint',
+ name='inherited_tags',
+ field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Internal use tags specifically for maintaining parity with product. This field will be present as a subset in the tags field', to='dojo.tagulous_endpoint_inherited_tags'),
+ ),
+ migrations.AddField(
+ model_name='engagement',
+ name='inherited_tags',
+ field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Internal use tags specifically for maintaining parity with product. This field will be present as a subset in the tags field', to='dojo.tagulous_engagement_inherited_tags'),
+ ),
+ migrations.AddField(
+ model_name='finding',
+ name='inherited_tags',
+ field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Internal use tags specifically for maintaining parity with product. This field will be present as a subset in the tags field', to='dojo.tagulous_finding_inherited_tags'),
+ ),
+ migrations.AddField(
+ model_name='test',
+ name='inherited_tags',
+ field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Internal use tags specifically for maintaining parity with product. This field will be present as a subset in the tags field', to='dojo.tagulous_test_inherited_tags'),
+ ),
+ ]
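The help text on the inherited_tags fields implies a propagation step in the save path: product tags are mirrored into each child's inherited_tags and folded into its visible tags. A rough sketch of that idea, assuming tagulous TagFields accept plain string lists (the helper is hypothetical, not the diff's actual implementation):

    def propagate_product_tags(product, finding):
        # Tags coming from the product are tracked separately...
        product_tags = [tag.name for tag in product.tags.all()]
        finding.inherited_tags = product_tags
        # ...and the visible tags field stays a superset of them.
        own_tags = {tag.name for tag in finding.tags.all()}
        finding.tags = sorted(own_tags | set(product_tags))
        finding.save()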
diff --git a/dojo/db_migrations/0189_finding_effort_and_remediation_for_fixing.py b/dojo/db_migrations/0189_finding_effort_and_remediation_for_fixing.py
new file mode 100644
index 00000000000..34be4712e09
--- /dev/null
+++ b/dojo/db_migrations/0189_finding_effort_and_remediation_for_fixing.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.1.5 on 2023-02-16 17:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0188_product_enable_product_tag_inheritance_and_more'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='finding',
+ name='planned_remediation_version',
+ field=models.CharField(blank=True, help_text='The target version when the vulnerability should be fixed / remediated', max_length=99, null=True, verbose_name='Planned remediation version'),
+ ),
+ migrations.AddField(
+ model_name='finding',
+ name='effort_for_fixing',
+ field=models.CharField(blank=True, help_text='Effort for fixing / remediating the vulnerability (Low, Medium, High)', max_length=99, null=True, verbose_name='Effort for fixing'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0190_system_settings_experimental_fp_history.py b/dojo/db_migrations/0190_system_settings_experimental_fp_history.py
new file mode 100644
index 00000000000..8ae1996217b
--- /dev/null
+++ b/dojo/db_migrations/0190_system_settings_experimental_fp_history.py
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.16 on 2022-11-09 04:35
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0189_finding_effort_and_remediation_for_fixing'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='system_settings',
+ name='retroactive_false_positive_history',
+ field=models.BooleanField(default=False, help_text='(EXPERIMENTAL) FP History will also retroactively mark/unmark all existing equal findings in the same product as false positives. Only works if the False Positive History feature is also enabled.'),
+ ),
+ migrations.AlterField(
+ model_name='system_settings',
+ name='false_positive_history',
+ field=models.BooleanField(default=False, help_text="(EXPERIMENTAL) DefectDojo will automatically mark the finding as a false positive if an equal finding (according to its dedupe algorithm) has been previously marked as a false positive on the same product. ATTENTION: Although the deduplication algorithm is used to determine if a finding should be marked as a false positive, this feature will not work if deduplication is enabled since it doesn't make sense to use both."),
+ ),
+ ]
diff --git a/dojo/db_migrations/0191_alter_notifications_risk_acceptance_expiration.py b/dojo/db_migrations/0191_alter_notifications_risk_acceptance_expiration.py
new file mode 100644
index 00000000000..f7526915166
--- /dev/null
+++ b/dojo/db_migrations/0191_alter_notifications_risk_acceptance_expiration.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.1.11 on 2023-10-22 20:50
+
+from django.db import migrations
+import multiselectfield.db.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0190_system_settings_experimental_fp_history'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='notifications',
+ name='risk_acceptance_expiration',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) Risk Acceptance expiries', max_length=24, verbose_name='Risk Acceptance Expiration'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0192_notifications_scan_added_empty.py b/dojo/db_migrations/0192_notifications_scan_added_empty.py
new file mode 100644
index 00000000000..bd444af76c2
--- /dev/null
+++ b/dojo/db_migrations/0192_notifications_scan_added_empty.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.1.11 on 2023-11-08 20:33
+
+from django.db import migrations
+import multiselectfield.db.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0191_alter_notifications_risk_acceptance_expiration'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notifications',
+ name='scan_added_empty',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=[], help_text='Triggered whenever a (re-)import has been done (even if that created/updated/closed no findings).', max_length=24),
+ ),
+ ]
diff --git a/dojo/db_migrations/0193_remove_system_settings_enable_auditlog.py b/dojo/db_migrations/0193_remove_system_settings_enable_auditlog.py
new file mode 100644
index 00000000000..c5c563be034
--- /dev/null
+++ b/dojo/db_migrations/0193_remove_system_settings_enable_auditlog.py
@@ -0,0 +1,17 @@
+# Generated by Django 4.1.11 on 2023-11-12 12:06
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0192_notifications_scan_added_empty'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='system_settings',
+ name='enable_auditlog',
+ ),
+ ]
diff --git a/dojo/db_migrations/0194_alter_finding_component_name.py b/dojo/db_migrations/0194_alter_finding_component_name.py
new file mode 100644
index 00000000000..46be3d3bdc4
--- /dev/null
+++ b/dojo/db_migrations/0194_alter_finding_component_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.13 on 2023-12-25 22:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0193_remove_system_settings_enable_auditlog'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='finding',
+ name='component_name',
+ field=models.CharField(blank=True, help_text='Name of the affected component (library name, part of a system, ...).', max_length=500, null=True, verbose_name='Component name'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0195_alter_announcement_dismissable.py b/dojo/db_migrations/0195_alter_announcement_dismissable.py
new file mode 100644
index 00000000000..0896390bae2
--- /dev/null
+++ b/dojo/db_migrations/0195_alter_announcement_dismissable.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.13 on 2023-12-05 12:53
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0194_alter_finding_component_name'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='announcement',
+ name='dismissable',
+ field=models.BooleanField(blank=True, default=False, help_text='Ticking this box allows users to dismiss the current announcement', verbose_name='Dismissable?'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0196_notifications_sla_breach_combined.py b/dojo/db_migrations/0196_notifications_sla_breach_combined.py
new file mode 100644
index 00000000000..f37cf6fda3f
--- /dev/null
+++ b/dojo/db_migrations/0196_notifications_sla_breach_combined.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.1.10 on 2023-09-12 11:29
+
+from django.db import migrations
+import multiselectfield.db.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0195_alter_announcement_dismissable'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notifications',
+ name='sla_breach_combined',
+ field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default=('alert', 'alert'), help_text='Get notified of (upcoming) SLA breaches (a message per project)', max_length=24, verbose_name='SLA breach (combined)'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0197_parser_merge.py b/dojo/db_migrations/0197_parser_merge.py
new file mode 100644
index 00000000000..613ebea02fa
--- /dev/null
+++ b/dojo/db_migrations/0197_parser_merge.py
@@ -0,0 +1,98 @@
+from django.db import migrations
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+OPENVAS_REFERENCES = ['OpenVAS CSV', 'OpenVAS XML']
+CLAIRKLAR_REFERENCES = ['Clair Klar Scan']
+
+
+# update the test type object as well as the scan type name
+def update_openvas_test(test, openvas_test_type) -> None:
+ if test.test_type.name in OPENVAS_REFERENCES or test.scan_type in OPENVAS_REFERENCES:
+ test.test_type = openvas_test_type
+ test.scan_type = openvas_test_type.name
+ test.save()
+
+
+def update_clairklar_test(test, clairklar_test_type) -> None:
+ if test.test_type.name in CLAIRKLAR_REFERENCES or test.scan_type in CLAIRKLAR_REFERENCES:
+ test.test_type = clairklar_test_type
+ test.scan_type = clairklar_test_type.name
+ test.save()
+
+
+# Update the found_by field to remove OpenVAS CSV/ OpenVAS XML and add OpenVAS Parser
+def update_openvas_finding(finding, openvas_test_type, openvascsv_test_type, openvasxml_test_type) -> None:
+ # Check if OpenVAS CSV is in the found_by list and remove it
+ if openvascsv_test_type in finding.found_by.all():
+ finding.found_by.remove(openvascsv_test_type.id)
+ # Check if OpenVAS XML is in the found_by list and remove it
+ if openvasxml_test_type in finding.found_by.all():
+ finding.found_by.remove(openvasxml_test_type.id)
+ # Check if OpenVAS Parser is already in the list before adding it
+ if openvas_test_type not in finding.found_by.all():
+ finding.found_by.add(openvas_test_type.id)
+ finding.save()
+
+
+# Update the found_by field to remove Clair Klar Scan and add Clair Scan
+def update_clairklar_finding(finding, clair_test_type, clairklar_test_type) -> None:
+ # Check if Clair Klar is in the found_by list and remove it
+ if clairklar_test_type in finding.found_by.all():
+ finding.found_by.remove(clairklar_test_type.id)
+ # Check if Clair Scan is already in the list before adding it
+ if clair_test_type not in finding.found_by.all():
+ finding.found_by.add(clair_test_type.id)
+ finding.save()
+
+
+# Update all finding objects that came from OpenVAS CSV /XML reports
+def migrate_openvas_parsers(apps, schema_editor):
+ finding_model = apps.get_model('dojo', 'Finding')
+ test_type_model = apps.get_model('dojo', 'Test_Type')
+ # Get or create OpenVAS Test Type and fetch the OpenVAS XML and OpenVAS CSV test types
+ openvas_test_type, _ = test_type_model.objects.get_or_create(name="OpenVAS Parser", active=True)
+ openvascsv_test_type = test_type_model.objects.filter(name="OpenVAS CSV").first()
+ openvasxml_test_type = test_type_model.objects.filter(name="OpenVAS XML").first()
+ # Get all the findings found by OpenVAS CSV and OpenVAS XML
+ findings = finding_model.objects.filter(test__scan_type__in=OPENVAS_REFERENCES)
+ logger.warning(f'We identified {findings.count()} OpenVAS CSV/XML findings to migrate to OpenVAS Parser findings')
+ # Iterate over all findings and change
+ for finding in findings:
+ # Update the found by field
+ update_openvas_finding(finding, openvas_test_type, openvascsv_test_type, openvasxml_test_type)
+ # Update the test object
+ update_openvas_test(finding.test, openvas_test_type)
+
+
+# Update all finding objects that came from Clair Klar reports
+def migrate_clairklar_parsers(apps, schema_editor):
+ finding_model = apps.get_model('dojo', 'Finding')
+ test_type_model = apps.get_model('dojo', 'Test_Type')
+ # Get or create Clair Scan Test Type and fetch the Clair Klar Scan test types
+ clair_test_type, _ = test_type_model.objects.get_or_create(name="Clair Scan", active=True)
+ clairklar_test_type = test_type_model.objects.filter(name="Clair Klar Scan").first()
+ # Get all the findings found by Clair Klar Scan
+ findings = finding_model.objects.filter(test__scan_type__in=CLAIRKLAR_REFERENCES)
+ logger.warning(f'We identified {findings.count()} Clair Klar Scan findings to migrate to Clair Scan findings')
+ # Iterate over all findings and change
+ for finding in findings:
+ # Update the found by field
+ update_clairklar_finding(finding, clair_test_type, clairklar_test_type)
+ # Update the test object
+ update_clairklar_test(finding.test, clair_test_type)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0196_notifications_sla_breach_combined'),
+ ]
+
+ operations = [
+ migrations.RunPython(migrate_openvas_parsers),
+ migrations.RunPython(migrate_clairklar_parsers),
+ ]
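Migrations 0187 above and 0199 below repeat this same rename-and-merge logic for different parsers. A parameterized helper along these lines (hypothetical, not part of the diff) would remove the duplication and the risk of copy-paste drift in the comments:

    def merge_test_types(apps, old_names, new_name):
        # Re-point tests and findings from the old parser names to new_name.
        Finding = apps.get_model('dojo', 'Finding')
        Test_Type = apps.get_model('dojo', 'Test_Type')
        new_type, _ = Test_Type.objects.get_or_create(name=new_name, active=True)
        old_types = list(Test_Type.objects.filter(name__in=old_names))
        for finding in Finding.objects.filter(test__scan_type__in=old_names):
            # Swap the found_by references from old to new.
            for old_type in old_types:
                if old_type in finding.found_by.all():
                    finding.found_by.remove(old_type.id)
            if new_type not in finding.found_by.all():
                finding.found_by.add(new_type.id)
            finding.save()
            # Re-point the test object as well.
            test = finding.test
            if test.test_type.name in old_names or test.scan_type in old_names:
                test.test_type = new_type
                test.scan_type = new_name
                test.save()

Each RunPython call then reduces to a thin wrapper, e.g. lambda apps, schema_editor: merge_test_types(apps, OPENVAS_REFERENCES, 'OpenVAS Parser').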
diff --git a/dojo/db_migrations/0198_alter_system_settings_enable_deduplication.py b/dojo/db_migrations/0198_alter_system_settings_enable_deduplication.py
new file mode 100644
index 00000000000..146754203c6
--- /dev/null
+++ b/dojo/db_migrations/0198_alter_system_settings_enable_deduplication.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.13 on 2024-01-31 18:53
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0197_parser_merge'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='system_settings',
+ name='enable_deduplication',
+ field=models.BooleanField(default=False, help_text='With this setting turned on, DefectDojo deduplicates findings by comparing endpoints, cwe fields, and titles. If two findings share a URL and have the same CWE or title, DefectDojo marks the recent finding as a duplicate. When deduplication is enabled, a list of deduplicated findings is added to the engagement view.', verbose_name='Deduplicate findings'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0199_whitesource_to_mend.py b/dojo/db_migrations/0199_whitesource_to_mend.py
new file mode 100644
index 00000000000..7620542cc7d
--- /dev/null
+++ b/dojo/db_migrations/0199_whitesource_to_mend.py
@@ -0,0 +1,55 @@
+from django.db import migrations
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+WHITESOURCE_REFERENCES = ['Whitesource Scan']
+
+
+# update the test type object as well as the scan type name
+def update_test(test, mend_test_type) -> None:
+ if test.test_type.name in WHITESOURCE_REFERENCES or test.scan_type in WHITESOURCE_REFERENCES:
+ test.test_type = mend_test_type
+ test.scan_type = mend_test_type.name
+ test.save()
+
+
+# Update the found_by field to remove whitesource and add mend
+def update_finding(finding, mend_test_type, whitesource_test_type) -> None:
+ # Check if whitesource is in the found_by list and remove it
+ if whitesource_test_type in finding.found_by.all():
+ finding.found_by.remove(whitesource_test_type.id)
+ # Check if mend is already in list somehow before adding it
+ if mend_test_type not in finding.found_by.all():
+ finding.found_by.add(mend_test_type.id)
+ finding.save()
+
+
+# Update all finding objects that came from whitesource reports
+def migrate_whitesource_findings_to_mend(apps, schema_editor):
+ finding_model = apps.get_model('dojo', 'Finding')
+ test_type_model = apps.get_model('dojo', 'Test_Type')
+ # Get or create Mend Test Type and fetch the whitesource test types
+ mend_test_type, _ = test_type_model.objects.get_or_create(name="Mend Scan", active=True)
+ whitesource_test_type = test_type_model.objects.filter(name="Whitesource Scan").first()
+ # Get all the findings found by whitesource
+ findings = finding_model.objects.filter(test__scan_type__in=WHITESOURCE_REFERENCES)
+ logger.warning(f'We identified {findings.count()} Whitesource findings to migrate to Mend findings')
+ # Iterate over all findings and change
+ for finding in findings:
+ # Update the found by field
+ update_finding(finding, mend_test_type, whitesource_test_type)
+ # Update the test object
+ update_test(finding.test, mend_test_type)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0198_alter_system_settings_enable_deduplication'),
+ ]
+
+ operations = [
+ migrations.RunPython(migrate_whitesource_findings_to_mend),
+ ]
diff --git a/dojo/db_migrations/0200_finding_sla_expiration_date_product_async_updating_and_more.py b/dojo/db_migrations/0200_finding_sla_expiration_date_product_async_updating_and_more.py
new file mode 100644
index 00000000000..20ef3e4f689
--- /dev/null
+++ b/dojo/db_migrations/0200_finding_sla_expiration_date_product_async_updating_and_more.py
@@ -0,0 +1,31 @@
+# Generated by Django 4.1.13 on 2024-01-17 03:07
+
+from django.db import migrations, models
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0199_whitesource_to_mend'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='finding',
+ name='sla_expiration_date',
+ field=models.DateField(blank=True, help_text="(readonly) The date SLA expires for this finding. Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Expiration Date'),
+ ),
+ migrations.AddField(
+ model_name='product',
+ name='async_updating',
+ field=models.BooleanField(default=False, help_text='Findings under this Product or SLA configuration are asynchronously being updated'),
+ ),
+ migrations.AddField(
+ model_name='sla_configuration',
+ name='async_updating',
+ field=models.BooleanField(default=False, help_text='Findings under this SLA configuration are asynchronously being updated'),
+ ),
+ ]
diff --git a/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py
new file mode 100644
index 00000000000..4b886301de7
--- /dev/null
+++ b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py
@@ -0,0 +1,133 @@
+from django.db import migrations
+from django.utils import timezone
+from datetime import datetime
+from django.conf import settings
+from dateutil.relativedelta import relativedelta
+import logging
+
+from dojo.utils import get_work_days
+
+logger = logging.getLogger(__name__)
+
+
+def calculate_sla_expiration_dates(apps, schema_editor):
+ System_Settings = apps.get_model('dojo', 'System_Settings')
+
+ ss, _ = System_Settings.objects.get_or_create()
+ if not ss.enable_finding_sla:
+ return
+
+ logger.info('Calculating SLA expiration dates for all findings')
+
+ SLA_Configuration = apps.get_model('dojo', 'SLA_Configuration')
+ Finding = apps.get_model('dojo', 'Finding')
+
+ findings = Finding.objects.filter(sla_expiration_date__isnull=True).order_by('id').only('id', 'sla_start_date', 'date', 'severity', 'test', 'mitigated')
+
+ page_size = 1000
+ total_count = Finding.objects.filter(id__gt=0).count()
+ logger.info('Found %d findings to be updated', total_count)
+
+ i = 0
+ batch = []
+ last_id = 0
+ total_pages = (total_count // page_size) + 2
+ for p in range(1, total_pages):
+ page = findings.filter(id__gt=last_id)[:page_size]
+ for find in page:
+ i += 1
+ last_id = find.id
+
+ start_date = find.sla_start_date if find.sla_start_date else find.date
+
+ sla_config = SLA_Configuration.objects.filter(id=find.test.engagement.product.sla_configuration_id).first()
+ sla_period = getattr(sla_config, find.severity.lower(), None)
+
+ days = None
+ if settings.SLA_BUSINESS_DAYS:
+ if find.mitigated:
+ days = get_work_days(find.date, find.mitigated.date())
+ else:
+ days = get_work_days(find.date, timezone.now().date())
+ else:
+ if isinstance(start_date, datetime):
+ start_date = start_date.date()
+
+ if find.mitigated:
+ days = (find.mitigated.date() - start_date).days
+ else:
+ days = (timezone.now().date() - start_date).days
+
+ days = days if days > 0 else 0
+
+ days_remaining = None
+ if sla_period:
+ days_remaining = sla_period - days
+
+ if days_remaining:
+ if find.mitigated:
+ find.sla_expiration_date = find.mitigated.date() + relativedelta(days=days_remaining)
+ else:
+ find.sla_expiration_date = timezone.now().date() + relativedelta(days=days_remaining)
+
+ batch.append(find)
+
+ if (i > 0 and i % page_size == 0):
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+
+def reset_sla_expiration_dates(apps, schema_editor):
+ System_Settings = apps.get_model('dojo', 'System_Settings')
+
+ ss, _ = System_Settings.objects.get_or_create()
+ if not ss.enable_finding_sla:
+ return
+
+ logger.info('Resetting SLA expiration dates for all findings')
+
+ Finding = apps.get_model('dojo', 'Finding')
+
+ findings = Finding.objects.filter(sla_expiration_date__isnull=False).order_by('id').only('id')
+
+ page_size = 1000
+ total_count = Finding.objects.filter(id__gt=0).count()
+ logger.info('Found %d findings to be reset', total_count)
+
+ i = 0
+ batch = []
+ last_id = 0
+ total_pages = (total_count // page_size) + 2
+ for p in range(1, total_pages):
+ page = findings.filter(id__gt=last_id)[:page_size]
+ for find in page:
+ i += 1
+ last_id = find.id
+
+ find.sla_expiration_date = None
+ batch.append(find)
+
+ if (i > 0 and i % page_size == 0):
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0200_finding_sla_expiration_date_product_async_updating_and_more'),
+ ]
+
+ operations = [
+ migrations.RunPython(calculate_sla_expiration_dates, reset_sla_expiration_dates),
+ ]
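The batching idiom used twice above (keyset pagination on id plus bulk_update) generalizes to any large backfill. A standalone sketch of the same pattern, with model and field names as placeholders:

    def backfill_in_batches(Model, field_name, compute_value, page_size=1000):
        # Walk the table in id order, updating one field in bulk batches.
        last_id, batch = 0, []
        while True:
            # Filtering on id__gt avoids the deep OFFSET scans that
            # Paginator-style slicing would incur on large tables.
            page = list(Model.objects.filter(id__gt=last_id).order_by('id')[:page_size])
            if not page:
                break
            for obj in page:
                setattr(obj, field_name, compute_value(obj))
                batch.append(obj)
                last_id = obj.id
            Model.objects.bulk_update(batch, [field_name])
            batch = []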
diff --git a/dojo/db_migrations/0202_alter_dojo_group_social_provider.py b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py
new file mode 100644
index 00000000000..9bbc7e2e5c6
--- /dev/null
+++ b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.13 on 2024-01-25 00:07
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0201_populate_finding_sla_expiration_date'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='dojo_group',
+ name='social_provider',
+ field=models.CharField(blank=True, choices=[('AzureAD', 'AzureAD'), ('Remote', 'Remote')], help_text='Group imported from a social provider.', max_length=10, null=True, verbose_name='Social Authentication Provider'),
+ ),
+ ]
diff --git a/dojo/decorators.py b/dojo/decorators.py
index 2fca3d453e5..39e16bb026c 100644
--- a/dojo/decorators.py
+++ b/dojo/decorators.py
@@ -3,9 +3,9 @@
from django.db import models
from django.conf import settings
-from ratelimit.exceptions import Ratelimited
-from ratelimit.core import is_ratelimited
-from ratelimit import ALL
+from django_ratelimit.exceptions import Ratelimited
+from django_ratelimit.core import is_ratelimited
+from django_ratelimit import UNSAFE
import logging
@@ -22,7 +22,7 @@ def we_want_async(*args, func=None, **kwargs):
logger.debug('dojo_async_task %s: running task in the foreground as sync=True has been found as kwarg', func)
return False
- user = get_current_user()
+ user = kwargs.get('async_user', get_current_user())
logger.debug('user: %s', user)
if Dojo_User.wants_block_execution(user):
@@ -38,8 +38,12 @@ def we_want_async(*args, func=None, **kwargs):
def dojo_async_task(func):
@wraps(func)
def __wrapper__(*args, **kwargs):
+ from dojo.utils import get_current_user
+ user = get_current_user()
+ kwargs['async_user'] = user
+ countdown = kwargs.pop("countdown", 0)
if we_want_async(*args, func=func, **kwargs):
- return func.delay(*args, **kwargs)
+ return func.apply_async(args=args, kwargs=kwargs, countdown=countdown)
else:
return func(*args, **kwargs)
@@ -155,12 +159,12 @@ def wrapper(self, *args, **kwargs):
f = open("/tmp/selenium_page_source.html", "w", encoding='utf-8')
f.writelines(self.driver.page_source)
# time.sleep(30)
- raise(e)
+ raise e
return wrapper
-def dojo_ratelimit(key='ip', rate=None, method=ALL, block=False):
+def dojo_ratelimit(key='ip', rate=None, method=UNSAFE, block=False):
def decorator(fn):
@wraps(fn)
def _wrapped(request, *args, **kw):
@@ -178,7 +182,7 @@ def _wrapped(request, *args, **kw):
if username:
dojo_user = Dojo_User.objects.filter(username=username).first()
if dojo_user:
- Dojo_User.enable_force_password_rest(dojo_user)
+ Dojo_User.enable_force_password_reset(dojo_user)
raise Ratelimited()
return fn(request, *args, **kw)
return _wrapped
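
Two things in the dojo_async_task change are worth calling out: the wrapper snapshots the current user into kwargs['async_user'] before dispatch, so we_want_async (and the task body) can still see who initiated the call once execution moves into a Celery worker where crum's get_current_user() no longer applies, and it pops a countdown kwarg so callers can delay scheduling via apply_async(). A usage sketch, with the task name and body made up for illustration:

    # Hypothetical task; the decorator order keeps dojo_async_task outermost
    # so it controls dispatch.
    from celery import shared_task
    from dojo.decorators import dojo_async_task

    @dojo_async_task
    @shared_task
    def recalculate_sla(finding_id, **kwargs):
        # kwargs will include async_user, injected by the wrapper
        ...

    # Schedules the task ~30s out when async is wanted; runs inline otherwise.
    # countdown is consumed by the wrapper and never reaches the task body.
    recalculate_sla(finding_id=42, countdown=30)
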
diff --git a/dojo/development_environment/urls.py b/dojo/development_environment/urls.py
index d6edccf44e0..a61b5073258 100644
--- a/dojo/development_environment/urls.py
+++ b/dojo/development_environment/urls.py
@@ -1,12 +1,12 @@
-from django.conf.urls import url
+from django.urls import re_path
from dojo.development_environment import views
urlpatterns = [
# dev envs
- url(r'^dev_env$', views.dev_env, name='dev_env'),
- url(r'^dev_env/add$', views.add_dev_env,
+ re_path(r'^dev_env$', views.dev_env, name='dev_env'),
+ re_path(r'^dev_env/add$', views.add_dev_env,
name='add_dev_env'),
- url(r'^dev_env/(?P<deid>\d+)/edit$',
+ re_path(r'^dev_env/(?P<deid>\d+)/edit$',
views.edit_dev_env, name='edit_dev_env'),
]
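
django.conf.urls.url() was deprecated in Django 2.0 and removed in 4.0, so these url -> re_path hunks are mechanical renames; re_path() accepts the same regex unchanged. For simple integer captures like deid, Django's path() converters would be an equivalent, slightly stricter alternative:

    # Illustrative only; the PR keeps the existing regexes via re_path.
    from django.urls import path, re_path

    urlpatterns = [
        # regex form, as used in this PR:
        re_path(r'^dev_env/(?P<deid>\d+)/edit$', views.edit_dev_env,
                name='edit_dev_env'),
        # converter form, equivalent for this route:
        # path('dev_env/<int:deid>/edit', views.edit_dev_env,
        #      name='edit_dev_env'),
    ]
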
diff --git a/dojo/development_environment/views.py b/dojo/development_environment/views.py
index 9d199e2c300..d6d4c167b17 100644
--- a/dojo/development_environment/views.py
+++ b/dojo/development_environment/views.py
@@ -1,6 +1,7 @@
# #dev envs
import logging
+from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.urls import reverse
from django.http import HttpResponseRedirect
@@ -17,7 +18,7 @@
logger = logging.getLogger(__name__)
-@user_is_configuration_authorized('dojo.view_development_environment', 'staff')
+@login_required
def dev_env(request):
initial_queryset = Development_Environment.objects.all().order_by('name')
name_words = [de.name for de in
@@ -34,7 +35,7 @@ def dev_env(request):
'name_words': name_words})
-@user_is_configuration_authorized('dojo.add_development_environment', 'staff')
+@user_is_configuration_authorized('dojo.add_development_environment')
def add_dev_env(request):
form = Development_EnvironmentForm()
if request.method == 'POST':
@@ -55,7 +56,7 @@ def add_dev_env(request):
})
-@user_is_configuration_authorized('dojo.change_development_environment', 'staff')
+@user_is_configuration_authorized('dojo.change_development_environment')
def edit_dev_env(request, deid):
de = get_object_or_404(Development_Environment, pk=deid)
form1 = Development_EnvironmentForm(instance=de)
@@ -71,7 +72,7 @@ def edit_dev_env(request, deid):
extra_tags='alert-success')
return HttpResponseRedirect(reverse('dev_env'))
if request.method == 'POST' and request.POST.get('delete_dev_env'):
- user_has_configuration_permission_or_403(request.user, 'dojo.delete_development_environment', 'staff')
+ user_has_configuration_permission_or_403(request.user, 'dojo.delete_development_environment')
form2 = Delete_Dev_EnvironmentForm(request.POST, instance=de)
if form2.is_valid():
try:
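
Two distinct permission changes land in this views file: the read-only dev_env list drops its configuration-permission check in favour of plain login_required, while the mutating views keep their checks but lose the legacy 'staff' fallback argument. For reference, a decorator without that fallback can be as small as the following sketch (an illustration built on stock Django auth primitives, not DefectDojo's actual implementation):

    from functools import wraps
    from django.core.exceptions import PermissionDenied

    def user_is_configuration_authorized(permission):
        # Enforce a single configuration permission; no staff fallback.
        def decorator(view):
            @wraps(view)
            def wrapped(request, *args, **kwargs):
                if not request.user.has_perm(permission):
                    raise PermissionDenied
                return view(request, *args, **kwargs)
            return wrapped
        return decorator
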
diff --git a/dojo/endpoint/queries.py b/dojo/endpoint/queries.py
index 622d4c282a5..df8c0b86f3e 100644
--- a/dojo/endpoint/queries.py
+++ b/dojo/endpoint/queries.py
@@ -1,5 +1,4 @@
from crum import get_current_user
-from django.conf import settings
from django.db.models import Exists, OuterRef, Q
from dojo.models import Endpoint, Endpoint_Status, Product_Member, Product_Type_Member, \
Product_Group, Product_Type_Group
@@ -22,9 +21,6 @@ def get_authorized_endpoints(permission, queryset=None, user=None):
if user.is_superuser:
return endpoints
- if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
- return endpoints
-
if user_has_global_permission(user, permission):
return endpoints
@@ -73,9 +69,6 @@ def get_authorized_endpoint_status(permission, queryset=None, user=None):
if user.is_superuser:
return endpoint_status
- if user.is_staff and settings.AUTHORIZATION_STAFF_OVERRIDE:
- return endpoint_status
-
if user_has_global_permission(user, permission):
return endpoint_status
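
With the AUTHORIZATION_STAFF_OVERRIDE short-circuit removed, both query helpers funnel the same way: anonymous users get an empty queryset, superusers and holders of a global permission get everything, and everyone else falls through to object-level filtering. The Exists/OuterRef import that survives at the top of the file powers that last step; schematically (relation and field names assumed for illustration):

    # Generic object-level filter sketch in the Exists/OuterRef style the
    # rest of dojo/endpoint/queries.py relies on (field names assumed).
    from django.db.models import Exists, OuterRef

    def visible_endpoints(user, roles):
        membership = Product_Member.objects.filter(
            product_id=OuterRef('product_id'), user=user, role__in=roles)
        return (Endpoint.objects
                .annotate(is_member=Exists(membership))
                .filter(is_member=True))
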
diff --git a/dojo/endpoint/urls.py b/dojo/endpoint/urls.py
index 6f2f1d737f3..56afa2411bc 100644
--- a/dojo/endpoint/urls.py
+++ b/dojo/endpoint/urls.py
@@ -1,41 +1,41 @@
-from django.conf.urls import url
+from django.urls import re_path
from dojo.endpoint import views
urlpatterns = [
# endpoints
- url(r'^endpoint$', views.all_endpoints,
+ re_path(r'^endpoint$', views.all_endpoints,
name='endpoint'),
- url(r'^endpoint/host$', views.all_endpoint_hosts,
+ re_path(r'^endpoint/host$', views.all_endpoint_hosts,
name='endpoint_host'),
- url(r'^endpoint/vulnerable$', views.vulnerable_endpoints,
+ re_path(r'^endpoint/vulnerable$', views.vulnerable_endpoints,
name='vulnerable_endpoints'),
- url(r'^endpoint/host/vulnerable$', views.vulnerable_endpoint_hosts,
+ re_path(r'^endpoint/host/vulnerable$', views.vulnerable_endpoint_hosts,
name='vulnerable_endpoint_hosts'),
- url(r'^endpoint/(?P<eid>\d+)$', views.view_endpoint,
+ re_path(r'^endpoint/(?P<eid>\d+)$', views.view_endpoint,
name='view_endpoint'),
- url(r'^endpoint/host/(?P<eid>\d+)$', views.view_endpoint_host,
+ re_path(r'^endpoint/host/(?P<eid>\d+)$', views.view_endpoint_host,
name='view_endpoint_host'),
- url(r'^endpoint/(?P<eid>\d+)/edit$', views.edit_endpoint,
+ re_path(r'^endpoint/(?P<eid>\d+)/edit$', views.edit_endpoint,
name='edit_endpoint'),
- url(r'^endpoints/(?P<pid>\d+)/add$', views.add_endpoint,
+ re_path(r'^endpoints/(?P<pid>\d+)/add$', views.add_endpoint,
name='add_endpoint'),
- url(r'^endpoint/(?P<eid>\d+)/delete$', views.delete_endpoint,
+ re_path(r'^endpoint/(?P<eid>\d+)/delete$', views.delete_endpoint,
name='delete_endpoint'),
- url(r'^endpoints/add$', views.add_product_endpoint,
+ re_path(r'^endpoints/add$', views.add_product_endpoint,
name='add_product_endpoint'),
- url(r'^endpoint/(?P<eid>\d+)/add_meta_data$', views.add_meta_data,
+ re_path(r'^endpoint/(?P<eid>\d+)/add_meta_data$', views.add_meta_data,
name='add_endpoint_meta_data'),
- url(r'^endpoint/(?P<eid>\d+)/edit_meta_data$', views.edit_meta_data,
+ re_path(r'^endpoint/(?P<eid>\d+)/edit_meta_data$', views.edit_meta_data,
name='edit_endpoint_meta_data'),
- url(r'^endpoint/bulk$', views.endpoint_bulk_update_all,
+ re_path(r'^endpoint/bulk$', views.endpoint_bulk_update_all,
name='endpoints_bulk_all'),
- url(r'^product/(?P<pid>\d+)/endpoint/bulk_product$', views.endpoint_bulk_update_all,
+ re_path(r'^product/(?P<pid>\d+)/endpoint/bulk_product$', views.endpoint_bulk_update_all,
name='endpoints_bulk_update_all_product'),
- url(r'^endpoint/(?P<tid>\d+)/bulk_status$', views.endpoint_status_bulk_update,
+ re_path(r'^endpoint/(?P<tid>\d+)/bulk_status$', views.endpoint_status_bulk_update,
name='endpoints_status_bulk'),
- url(r'^endpoint/migrate$', views.migrate_endpoints_view,
+ re_path(r'^endpoint/migrate$', views.migrate_endpoints_view,
name='endpoint_migrate'),
- url(r'^endpoint/(?P