
Commit

Fix T20
kiblik committed Jul 3, 2024
1 parent ad593b7 commit 2de0dfc
Showing 43 changed files with 208 additions and 236 deletions.
6 changes: 4 additions & 2 deletions docker/install_chrome_dependencies.py
@@ -3,9 +3,11 @@
This solution is largely based on the Playwright's browser dependencies script at
https://github.com/microsoft/playwright/blob/main/utils/linux-browser-dependencies/inside_docker/list_dependencies.js
"""

import logging
import subprocess

logger = logging.getLogger(__name__)


def find_packages(library_name):
stdout = run_command(["apt-file", "search", library_name])
@@ -58,4 +60,4 @@ def ldd(file_path):
for p in packages:
missing_packages.append(p)

print(" ".join(missing_packages))
logger.info("missing_packages: " + (" ".join(missing_packages)))
1 change: 0 additions & 1 deletion dojo/api_v2/prefetch/schema.py
@@ -10,7 +10,6 @@ def _get_path_to_GET_serializer_map(generator):
method,
view,
) in generator._get_paths_and_endpoints():
# print(path, path_pattern, method, view)
if method == "GET":
if hasattr(view, "get_serializer_class"):
path_to_GET_serializer[path] = view.get_serializer_class()
1 change: 0 additions & 1 deletion dojo/api_v2/serializers.py
@@ -1178,7 +1178,6 @@ class Meta:
exclude = ("inherited_tags",)

def validate(self, data):
# print('EndpointSerialize.validate')

if not self.context["request"].method == "PATCH":
if "product" not in data:
1 change: 0 additions & 1 deletion dojo/api_v2/views.py
@@ -1711,7 +1711,6 @@ def destroy(self, request, *args, **kwargs):
return Response(status=status.HTTP_204_NO_CONTENT)

# def list(self, request):
# print(vars(request))
# # Note the use of `get_queryset()` instead of `self.queryset`
# queryset = self.get_queryset()
# serializer = self.serializer_class(queryset, many=True)
2 changes: 1 addition & 1 deletion dojo/apps.py
@@ -16,7 +16,7 @@ class DojoAppConfig(AppConfig):

def ready(self):
# we need to initializer waston here because in models.py is to early if we want add extra fields to index
# print('ready(): initializing watson')
# logger.info('ready(): initializing watson')
# commented out ^ as it prints in manage.py dumpdata, docker logs and many other places
# logger doesn't work yet at this stage

2 changes: 1 addition & 1 deletion dojo/celery.py
@@ -21,7 +21,7 @@

@app.task(bind=True)
def debug_task(self):
print(f'Request: {self.request!r}')
logger.info(f'Request: {self.request!r}')


@setup_logging.connect
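A side note on the debug_task change above: an f-string builds the log message eagerly, even when the INFO level is disabled, whereas %-style arguments are only formatted if a handler actually emits the record. Both forms are shown in this standalone sketch (FakeRequest is a made-up stand-in for a bound task's self.request, not Celery code):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class FakeRequest:
    def __repr__(self):
        return "<Context: {'id': 'abc123'}>"


request = FakeRequest()
logger.info(f"Request: {request!r}")   # eager interpolation, as in the diff
logger.info("Request: %r", request)    # lazy formatting, evaluated only if the record is emitted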
4 changes: 2 additions & 2 deletions dojo/decorators.py
@@ -153,8 +153,8 @@ def wrapper(self, *args, **kwargs):
return func(self, *args, **kwargs)

except Exception:
print("exception occured at url:", self.driver.current_url)
print("page source:", self.driver.page_source)
logger.info(f"exception occured at url: {self.driver.current_url}")
logger.info(f"page source: {self.driver.page_source}")
f = open("/tmp/selenium_page_source.html", "w", encoding='utf-8')
f.writelines(self.driver.page_source)
# time.sleep(30)
2 changes: 1 addition & 1 deletion dojo/importers/options.py
@@ -1,7 +1,7 @@
import logging
from datetime import datetime
from functools import wraps
from pprint import pprint as pp
from pprint import pformat as pp
from typing import Any, Callable, List

from django.contrib.auth.models import User
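The pprint-to-pformat swap above serves the same goal: pprint() writes directly to stdout, while pformat() returns the pretty-printed text as a string that can be handed to a logger. A small sketch with invented option values (not the real importer options):

import logging
from pprint import pformat

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

options = {"scan_type": "Example Scan", "active": True, "tags": ["web", "api"]}
# pformat() returns a string, so the pretty-printed dict goes through the
# logger instead of being printed straight to stdout.
logger.debug("import options:\n%s", pformat(options))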
2 changes: 1 addition & 1 deletion dojo/jira_link/helper.py
@@ -1048,7 +1048,7 @@ def issue_from_jira_is_active(issue_from_jira):
# "resolution": "None"

if not hasattr(issue_from_jira.fields, 'resolution'):
print(vars(issue_from_jira))
logger.debug(vars(issue_from_jira))
return True

if not issue_from_jira.fields.resolution:
14 changes: 9 additions & 5 deletions dojo/management/commands/dupecheck.py
@@ -1,3 +1,5 @@
import logging

from django.core.management.base import BaseCommand
from django.db.models import Count

@@ -8,22 +10,24 @@
This script will identify duplicates in DefectDojo:
"""

logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = 'No input commands for dedupe findings.'

def count_the_duplicates(self, model, column):
print("===================================")
print(" Table:" + str(model) + " Column: " + column)
print("===================================")
logger.info("===================================")
logger.info(" Table:" + str(model) + " Column: " + column)
logger.info("===================================")
duplicates = model.objects.values(column).annotate(Count('id')).order_by().filter(id__count__gt=1)
kwargs = {'{}__{}'.format(column, 'in'): [item[column] for item in duplicates]}
duplicates = model.objects.filter(**kwargs)

if not duplicates:
print("No duplicates found")
logger.info("No duplicates found")
for dupe in duplicates:
print(f'{dupe.id}, Duplicate value: {getattr(dupe, column)}, Object: {dupe}')
logger.info(f'{dupe.id}, Duplicate value: {getattr(dupe, column)}, Object: {dupe}')

def handle(self, *args, **options):
self.count_the_duplicates(Product, 'name')
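For context, the query in count_the_duplicates groups rows by the given column, keeps the values that occur more than once, and then fetches every row carrying one of those values. The same logic over an in-memory list, as a rough sketch (the rows are invented, not DefectDojo data):

import logging
from collections import Counter

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

rows = [
    {"id": 1, "name": "App A"},
    {"id": 2, "name": "App A"},
    {"id": 3, "name": "App B"},
]

counts = Counter(row["name"] for row in rows)                       # values(column).annotate(Count('id'))
duplicate_values = {value for value, n in counts.items() if n > 1}  # filter(id__count__gt=1)
for dupe in (row for row in rows if row["name"] in duplicate_values):  # filter(**kwargs)
    logger.info("%s, Duplicate value: %s", dupe["id"], dupe["name"])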
15 changes: 9 additions & 6 deletions dojo/management/commands/jira_async_updates.py
@@ -1,3 +1,5 @@
import logging

from django.core.management.base import BaseCommand
from django.utils import timezone
from jira.exceptions import JIRAError
@@ -10,6 +12,8 @@
Useful if you need to make bulk changes with JIRA:
"""

logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = 'No input commands for JIRA bulk update.'
@@ -28,15 +32,14 @@ def handle(self, *args, **options):
issue = jira.issue(j_issue.jira_id)

# Issue Cloned
print(issue.fields.issuelinks[0])
logger.info(issue.fields.issuelinks[0])

print("Jira Issue: " + str(issue))
print("Resolution: " + str(issue.fields.resolution))
logger.info("Jira Issue: " + str(issue))
logger.info("Resolution: " + str(issue.fields.resolution))

if issue.fields.resolution is not None \
and not finding.under_defect_review:
# print issue.fields.__dict__
print("Jira Issue: " + str(issue) + " changed status")
logger.info("Jira Issue: " + str(issue) + " changed status")

# Create Jira Note
now = timezone.now()
@@ -57,4 +60,4 @@ def handle(self, *args, **options):
finding)
finding.save()
else:
print("No update necessary")
logger.info("No update necessary")
4 changes: 2 additions & 2 deletions dojo/management/commands/jira_status_reconciliation.py
@@ -22,7 +22,7 @@ def jira_status_reconciliation(*args, **kwargs):
logger.debug('mode: %s product:%s engagement: %s dryrun: %s', mode, product, engagement, dryrun)

if mode and mode not in ('push_status_to_jira', 'import_status_from_jira', 'reconcile'):
print('mode must be one of reconcile, push_status_to_jira or import_status_from_jira')
logger.info('mode must be one of reconcile, push_status_to_jira or import_status_from_jira')
return False

if not mode:
@@ -195,7 +195,7 @@ def jira_status_reconciliation(*args, **kwargs):

logger.info('results (semicolon seperated)')
for message in messages:
print(message)
logger.info(message)


class Command(BaseCommand):
6 changes: 4 additions & 2 deletions dojo/management/commands/migrate_surveys.py
@@ -1,3 +1,4 @@
import logging
import sys

from django.core.management.base import BaseCommand
@@ -9,6 +10,7 @@
Author: Cody Maffucci
This script will migrate survey data from one external app to core dojo
"""
logger = logging.getLogger(__name__)


class Command(BaseCommand):
@@ -56,9 +58,9 @@ def handle(self, *args, **options):
update_string = 'UPDATE `' + new_table_name + '` SET polymorphic_ctype_id = ' + str(ctype_id) + ';'
cursor.execute(str(update_string))
# Drop the ddse table
print('All defectDojo_engagement_sruvey tables migrated to dojo tables')
logger.info('All defectDojo_engagement_sruvey tables migrated to dojo tables')

# Delete the old tables in reverse order to drop the children first
for table in reversed(table_list):
cursor.execute('DROP TABLE `' + table + '`;')
print('All defectDojo_engagement_sruvey tables removed')
logger.info('All defectDojo_engagement_sruvey tables removed')
7 changes: 5 additions & 2 deletions dojo/management/commands/print_settings.py
@@ -1,9 +1,12 @@
import logging
import os
from pprint import pprint
from pprint import pformat

from django.conf import settings
from django.core.management.base import BaseCommand

logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = 'Display all the currently loaded settings in the project'
@@ -18,4 +21,4 @@ def handle(self, *args, **options):
value = getattr(settings, attr)
a_dict[attr] = value

pprint(a_dict)
logging.info(pformat(a_dict))
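One detail worth noting in the hunk above: logging.info() logs through the root logger, whereas the logger defined at the top of the same file would be used via logger.info(). A standalone sketch of dumping a settings-like dict through the module logger (the dict contents are invented, not actual Django settings):

import logging
from pprint import pformat

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

a_dict = {"DEBUG": False, "TIME_ZONE": "UTC", "ALLOWED_HOSTS": ["*"]}
logger.info(pformat(a_dict))  # module logger rather than the root logger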
8 changes: 6 additions & 2 deletions dojo/management/commands/push_to_jira_update.py
@@ -1,10 +1,14 @@
import logging

from django.core.management.base import BaseCommand
from pytz import timezone

import dojo.jira_link.helper as jira_helper
from dojo.models import Finding
from dojo.utils import get_system_setting

logger = logging.getLogger(__name__)

locale = timezone(get_system_setting('time_zone'))

"""
@@ -22,6 +26,6 @@ def handle(self, *args, **options):
findings = findings.filter(verified=True, active=True)

for finding in findings:
print("Checking issue:" + str(finding.id))
logger.info("Checking issue:" + str(finding.id))
jira_helper.update_jira_issue(finding, True)
print("########\n")
logger.info("########\n")
17 changes: 9 additions & 8 deletions dojo/management/commands/test_celery_decorator.py
@@ -1,4 +1,5 @@

import logging
from functools import wraps

from django.core.management.base import BaseCommand
@@ -8,6 +9,8 @@
from dojo.models import Finding, Notes
from dojo.utils import test_valentijn

logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = "Command to do some tests with celery and decorators. Just committing it so 'we never forget'"
@@ -20,7 +23,6 @@ def handle(self, *args, **options):
test_valentijn(finding, Notes.objects.all().first())
# test_valentijn(1)

# print('sync')
# my_test_task(finding)

# sync
@@ -30,7 +32,6 @@ def handle(self, *args, **options):
# inside after
# outside after

# print('async')
# my_test_task.delay(finding)

# async
@@ -48,15 +49,15 @@ def handle(self, *args, **options):

def test2(clazz, id):
model = clazz.objects.get(id=id)
print(model)
logger.debug(model)


def my_decorator_outside(func):
@wraps(func)
def wrapper(*args, **kwargs):
print("outside before")
logger.debug("outside before")
func(*args, **kwargs)
print("outside after")
logger.debug("outside after")

if getattr(func, 'delay', None):
wrapper.delay = my_decorator_outside(func.delay)
@@ -67,17 +68,17 @@ def wrapper(*args, **kwargs):
def my_decorator_inside(func):
@wraps(func)
def wrapper(*args, **kwargs):
print("inside before")
logger.debug("inside before")
func(*args, **kwargs)
print("inside after")
logger.debug("inside after")
return wrapper


@my_decorator_outside
@app.task
@my_decorator_inside
def my_test_task(new_finding, *args, **kwargs):
print('oh la la what a nice task')
logger.debug('oh la la what a nice task')


# example working with multiple parameters...
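The interesting trick in this test command is that the outer decorator also wraps the task's .delay attribute, so the same before/after logging runs whether the task is called synchronously or dispatched asynchronously. A plain-Python sketch of that idea without Celery (log_calls and my_test_task here are illustrative names, not the diff's code):

import logging
from functools import wraps

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)


def log_calls(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        logger.debug("before %s", func.__name__)
        result = func(*args, **kwargs)
        logger.debug("after %s", func.__name__)
        return result

    # If the callable looks like a Celery task (has .delay), wrap that entry point too.
    if getattr(func, "delay", None):
        wrapper.delay = log_calls(func.delay)
    return wrapper


@log_calls
def my_test_task(finding):
    logger.debug("oh la la what a nice task: %s", finding)


my_test_task("finding-1")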
4 changes: 0 additions & 4 deletions dojo/product/views.py
@@ -144,8 +144,6 @@ def product(request):
# perform annotation/prefetching by replacing the queryset in the page with an annotated/prefetched queryset.
prod_list.object_list = prefetch_for_product(prod_list.object_list)

# print(prod_list.object_list.explain)

add_breadcrumb(title=_("Product List"), top_level=not len(request.GET), request=request)

return render(request, 'dojo/product.html', {
@@ -232,7 +230,6 @@ def view_product(request, pid):
success_percent = round((float(total_pass) / float(total)) * 100, 2)
waiting_percent = round((float(total_wait) / float(total)) * 100, 2)
fail_percent = round(100 - success_percent - waiting_percent, 2)
print(fail_percent)
benchAndPercent.append({
'id': benchmarks[i].benchmark_type.id,
'name': benchmarks[i].benchmark_type,
@@ -1649,7 +1646,6 @@ def edit_notifications(request, pid):
logger.debug('existing product notifications found')

form = ProductNotificationsForm(request.POST, instance=product_notifications)
# print(vars(form))

if form.is_valid():
form.save()
6 changes: 1 addition & 5 deletions dojo/utils.py
@@ -856,9 +856,7 @@ def get_punchcard_data(objs, start_date, weeks, view='Finding'):

# add week in progress + empty weeks on the end if needed
while tick < weeks + 1:
# print(tick)
week_data, label = get_week_data(start_of_week, tick, day_counts)
# print(week_data, label)
punchcard.extend(week_data)
ticks.append(label)
tick += 1
@@ -1776,11 +1774,9 @@ def is_safe_url(url):

def get_return_url(request):
return_url = request.POST.get('return_url', None)
# print('return_url from POST: ', return_url)
if return_url is None or not return_url.strip():
# for some reason using request.GET.get('return_url') never works
return_url = request.GET['return_url'] if 'return_url' in request.GET else None
# print('return_url from GET: ', return_url)

return return_url if return_url else None

@@ -2334,7 +2330,7 @@ def crawl(self, object, model_list, **kwargs):
logger.debug('ASYNC_DELETE: Deleting ' + str(len(objects_to_delete)) + ' ' + self.get_object_name(model) + 's in chunks')
chunks = self.chunk_list(model, objects_to_delete)
for chunk in chunks:
print('deleting', len(chunk), self.get_object_name(model))
logger.debug(f'deleting {len(chunk)} {self.get_object_name(model)}')
self.delete_chunk(chunk)
self.delete_chunk([object])
logger.debug('ASYNC_DELETE: Successfully deleted ' + self.get_object_name(object) + ': ' + str(object))
2 changes: 1 addition & 1 deletion dojo/views.py
@@ -222,5 +222,5 @@ def access_file(request, fid, oid, obj_type, url=False):
# If reaching this far, user must have permission to get file
file = get_object_or_404(FileUpload, pk=fid)
redirect_url = f'{settings.MEDIA_ROOT}/{file.file.url.lstrip(settings.MEDIA_URL)}'
print(redirect_url)
logger.debug(redirect_url)
return FileResponse(open(redirect_url, "rb"))
2 changes: 1 addition & 1 deletion dojo/wsgi.py
@@ -51,7 +51,7 @@ def is_debugger_listening(port):
if os.environ.get("DD_DEBUG_WAIT_FOR_CLIENT") == "True":
logger.info(f"Waiting for the debugging client to connect on port {debugpy_port}")
debugpy.wait_for_client() # noqa: T100
print("Debugging client connected, resuming execution")
logger.debug("Debugging client connected, resuming execution")
except RuntimeError as e:
if str(e) != "Can't listen for client connections: [Errno 98] Address already in use":
logger.exception(e)