From 0e0357fd9374209bcc95e716441a4b01a889d3c6 Mon Sep 17 00:00:00 2001 From: osuhickeys Date: Thu, 14 Sep 2023 08:59:59 -0500 Subject: [PATCH] Commit 0005 --- config/dcpd_config_example.py | 4 +- releases/v1.2.1.txt | 29 +++++++++ src/dcpd_api_spec.json | 2 +- src/dcpd_main.py | 6 +- src/dcpd_redaction.py | 116 +++++++++++++++++----------------- 5 files changed, 93 insertions(+), 64 deletions(-) create mode 100755 releases/v1.2.1.txt diff --git a/config/dcpd_config_example.py b/config/dcpd_config_example.py index 0dec9ea..2804ea6 100755 --- a/config/dcpd_config_example.py +++ b/config/dcpd_config_example.py @@ -9,7 +9,7 @@ ] # Set the version -VERSION = "v1.2.0" +VERSION = "v1.2.1" # Replace with desired file name for the output html file DEFAULT_OUTPUT_HTML_FILE_NAME = "dcpd_output.html" @@ -162,7 +162,7 @@ DEFAULT_SORT_ORDER = "none" # Default log file size -MAX_LOG_SIZE = 10 * 1024 * 1024 # 10 MB +MAX_LOG_SIZE = 1 * 1024 * 1024 # 1 MB # Default numer of logs to keep LOG_RETENTION_COUNT = 5 # Keep the latest 5 log files. diff --git a/releases/v1.2.1.txt b/releases/v1.2.1.txt new file mode 100755 index 0000000..3e0651d --- /dev/null +++ b/releases/v1.2.1.txt @@ -0,0 +1,29 @@ +Release Notes - docker-compose-ports-dump + +Version: 1.2.1 +Release Date: September 17th, 2023 + +Highlights + 1: None for this release + +New Features + 1: None for this release + +Improvements + 1: Reduce debug and info log sizes from 10 MB to 1 MB for web viewing performance + +Bug Fixes + 1: Fixed weather update to run every time + 2: Fixed support zip files so all are now included + 3: Fixed host networking total in dcpd stats file + +Breaking Changes + 1. None for this release + +Deprecated Features (if any) + 1. None for this release + +Known Issues + 1. Global search and column filters - Searching external and internal port for 80 will + return 80 and 7801 as expected. However, it will also return 8405. + 2. 
Does not work behind a proxy diff --git a/src/dcpd_api_spec.json b/src/dcpd_api_spec.json index 9510d19..8ed1fc9 100755 --- a/src/dcpd_api_spec.json +++ b/src/dcpd_api_spec.json @@ -4,7 +4,7 @@ "info": { "description": "dcpd API Documentation", "title": "dcpd API", - "version": "v1.1.1" + "version": "v1.2.1" }, "paths": { "/api/data/fetch_table/{table_name}": { diff --git a/src/dcpd_main.py b/src/dcpd_main.py index a30a576..8b3a8c7 100755 --- a/src/dcpd_main.py +++ b/src/dcpd_main.py @@ -180,13 +180,13 @@ def dcpd(args): debug_info, port_mapping_str, ports_data_str, environment_data_lines = dcpd_debug.generate_debug_info(cursor) dcpd_debug.print_debug_output(debug_info, port_mapping_str, ports_data_str, environment_data_lines, paginate=True, display=False) - # Generate the statistics and write to a file - dcpd_stats.execute_statistics_generation(cursor, args) - # Gather services attached to the host network dcpd_hn.host_networking(cursor, args) logger_info.info("Services attached to host networking collected.") + # Generate the statistics and write to a file + dcpd_stats.execute_statistics_generation(cursor, args) + # Process arguments that require extracting and presenting data from the database in various ways. if args.debug: # Generate an exhaustive debug report covering environment, port mappings, and software details and display to console. 
diff --git a/src/dcpd_redaction.py b/src/dcpd_redaction.py index bf55e4d..022038c 100755 --- a/src/dcpd_redaction.py +++ b/src/dcpd_redaction.py @@ -62,28 +62,28 @@ def redact_dcpd_html_location(args): if args.verbose: print("Starting redaction of location information from dcpd_html.json...") - try: - with open('../data/dcpd_html.json', 'r', encoding='utf-8') as file: - data = json.load(file) - data['location'] = '[REDACTED]' - with open('../data/redacted_dcpd_html.json', 'w', encoding='utf-8') as outfile: - json.dump(data, outfile, indent=4) - - logger_info.info("Successfully redacted location information from dcpd_html.json") - if args.verbose: - print("Redaction of location information from dcpd_html.json completed.") - - except (FileNotFoundError, PermissionError, IOError) as file_error: - logger_info.error("File operation error during redaction: %s", file_error) - logger_debug.exception("File operation error during redaction with detailed traceback") - if args.verbose: - print(f"File operation error during redaction: {file_error}") - - except json.JSONDecodeError as json_error: - logger_info.error("JSON parsing error during redaction: %s", json_error) - logger_debug.exception("JSON parsing error with detailed traceback") - if args.verbose: - print(f"JSON parsing error during redaction: {json_error}") + try: + with open('../data/dcpd_html.json', 'r', encoding='utf-8') as file: + data = json.load(file) + data['location'] = '[REDACTED]' + with open('../data/redacted_dcpd_html.json', 'w', encoding='utf-8') as outfile: + json.dump(data, outfile, indent=4) + + logger_info.info("Successfully redacted location information from dcpd_html.json") + if args.verbose: + print("Redaction of location information from dcpd_html.json completed.") + + except (FileNotFoundError, PermissionError, IOError) as file_error: + logger_info.error("File operation error during redaction: %s", file_error) + logger_debug.exception("File operation error during redaction with detailed 
traceback") + if args.verbose: + print(f"File operation error during redaction: {file_error}") + + except json.JSONDecodeError as json_error: + logger_info.error("JSON parsing error during redaction: %s", json_error) + logger_debug.exception("JSON parsing error with detailed traceback") + if args.verbose: + print(f"JSON parsing error during redaction: {json_error}") # Logging the conclusion of the function and, if verbosity is enabled, logger_info.info("Exiting redact_dcpd_html_location()") @@ -113,37 +113,37 @@ def redact_dcpd_cache_location(args): if args.verbose: print("Starting sensitive data redaction from dcpd_cache_location.json...") - try: - with open('../data/dcpd_cache_location.json', 'r', encoding='utf-8') as file: - data = json.load(file) - data['ip'] = re.sub(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b', '[REDACTED]', data['ip']) - sensitive_fields = ['hostname', 'city', 'region', 'country', 'loc', 'org', 'postal', 'timezone'] - for field in sensitive_fields: - data[field] = '[REDACTED]' - with open('../data/redacted_dcpd_cache_location.json', 'w', encoding='utf-8') as outfile: - json.dump(data, outfile, indent=4) - - logger_info.info("Successfully redacted sensitive data from dcpd_cache_location.json") - if args.verbose: - print("Sensitive data redaction from dcpd_cache_location.json completed.") - - except (FileNotFoundError, PermissionError, IOError) as file_error: - logger_info.error("File operation error during redaction: %s", file_error) - logger_debug.exception("File operation error during redaction with detailed traceback") - if args.verbose: - print(f"File operation error during redaction: {file_error}") - - except json.JSONDecodeError as json_error: - logger_info.error("JSON parsing error during redaction: %s", json_error) - logger_debug.exception("JSON parsing error with detailed traceback") - if args.verbose: - print(f"JSON parsing error during redaction: {json_error}") - - except re.error as regex_error: - logger_info.error("Regex 
substitution error during redaction: %s", regex_error) - logger_debug.exception("Regex substitution error with detailed traceback") - if args.verbose: - print(f"Regex substitution error during redaction: {regex_error}") + try: + with open('../data/dcpd_cache_location.json', 'r', encoding='utf-8') as file: + data = json.load(file) + data['ip'] = re.sub(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b', '[REDACTED]', data['ip']) + sensitive_fields = ['hostname', 'city', 'region', 'country', 'loc', 'org', 'postal', 'timezone'] + for field in sensitive_fields: + data[field] = '[REDACTED]' + with open('../data/redacted_dcpd_cache_location.json', 'w', encoding='utf-8') as outfile: + json.dump(data, outfile, indent=4) + + logger_info.info("Successfully redacted sensitive data from dcpd_cache_location.json") + if args.verbose: + print("Sensitive data redaction from dcpd_cache_location.json completed.") + + except (FileNotFoundError, PermissionError, IOError) as file_error: + logger_info.error("File operation error during redaction: %s", file_error) + logger_debug.exception("File operation error during redaction with detailed traceback") + if args.verbose: + print(f"File operation error during redaction: {file_error}") + + except json.JSONDecodeError as json_error: + logger_info.error("JSON parsing error during redaction: %s", json_error) + logger_debug.exception("JSON parsing error with detailed traceback") + if args.verbose: + print(f"JSON parsing error during redaction: {json_error}") + + except re.error as regex_error: + logger_info.error("Regex substitution error during redaction: %s", regex_error) + logger_debug.exception("Regex substitution error with detailed traceback") + if args.verbose: + print(f"Regex substitution error during redaction: {regex_error}") # Logging the conclusion of the function. If verbosity is enabled, # a print statement notifies the user of the end of the redaction process. 
@@ -222,19 +222,19 @@ def create_compressed_files(args): '../data/dcpd.csv', '../data/dcpd.db', '../data/dcpd_bootstrap.log', - '../data/dcpd_docker_inspect.csv', '../data/dcpd_caddy.log', + '../data/dcpd_cron.log', + '../data/dcpd_debug.txt', '../data/dcpd_docker_inspect.csv', - '../data/dcpd_docker_ps.cvs', - '../data/dcpd_stats.txt', + '../data/dcpd_docker_ps.csv', + '../data/dcpd_docker_stats.txt', '../data/dcpd_flask.log', '../data/dcpd_gunicorn.log', '../data/dcpd_gunicorn_access.log', '../data/dcpd_gunicorn_error.log', '../data/dcpd_host_networking.csv', - '../data/redacted_dcpd_html.json', '../data/dcpd_stats.txt', - '../data/redacted_dcpd_debug.txt' + '../data/redacted_dcpd_debug.txt', '../data/redacted_dcpd_cache_location.json', '../data/redacted_dcpd_html.json' ]