From 1ee966a24a497fc9b99d73448f29e515f9314607 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 21 Oct 2024 12:42:55 +0100 Subject: [PATCH 01/18] Reformat packages to separate logic and allow for easier testing --- src/slack_watchman/__init__.py | 33 +-- src/slack_watchman/clients/__init__.py | 0 src/slack_watchman/clients/slack_client.py | 228 +++++++++++++++ .../{sw_logger.py => loggers.py} | 0 src/slack_watchman/signature_downloader.py | 2 +- ...slack_wrapper.py => watchman_processor.py} | 264 +----------------- 6 files changed, 260 insertions(+), 267 deletions(-) create mode 100644 src/slack_watchman/clients/__init__.py create mode 100644 src/slack_watchman/clients/slack_client.py rename src/slack_watchman/{sw_logger.py => loggers.py} (100%) rename src/slack_watchman/{slack_wrapper.py => watchman_processor.py} (60%) diff --git a/src/slack_watchman/__init__.py b/src/slack_watchman/__init__.py index f8f108e..40613f6 100644 --- a/src/slack_watchman/__init__.py +++ b/src/slack_watchman/__init__.py @@ -10,10 +10,9 @@ import yaml from slack_watchman import ( - sw_logger, signature_downloader, exceptions, - slack_wrapper + watchman_processor ) from slack_watchman.models import ( signature, @@ -22,8 +21,10 @@ post, conversation ) +from slack_watchman.loggers import StdoutLogger, JSONLogger +from slack_watchman.clients.slack_client import SlackClient -OUTPUT_LOGGER: sw_logger.JSONLogger +OUTPUT_LOGGER: JSONLogger def validate_conf(path: str, cookie: bool) -> bool: @@ -86,7 +87,7 @@ def validate_conf(path: str, cookie: bool) -> bool: raise exceptions.MissingEnvVarError('SLACK_WATCHMAN_URL') -def search(slack_connection: slack_wrapper.SlackAPI, +def search(slack_connection: SlackClient, loaded_signature: signature.Signature, timeframe: int or str, scope: str, @@ -105,7 +106,7 @@ def search(slack_connection: slack_wrapper.SlackAPI, if scope == 'messages': OUTPUT_LOGGER.log('INFO', f'Searching for posts containing {loaded_signature.name}') - messages = slack_wrapper.find_messages( + messages = watchman_processor.find_messages( slack_connection, OUTPUT_LOGGER, loaded_signature, @@ -122,7 +123,7 @@ def search(slack_connection: slack_wrapper.SlackAPI, notify_type='result') if scope == 'files': OUTPUT_LOGGER.log('INFO', f'Searching for posts containing {loaded_signature.name}') - files = slack_wrapper.find_files( + files = watchman_processor.find_files( slack_connection, OUTPUT_LOGGER, loaded_signature, @@ -156,7 +157,7 @@ def unauthenticated_probe(workspace_domain: str, f'and return any available authentication information.') OUTPUT_LOGGER.log('SUCCESS', f'Workspace: {workspace_domain}') try: - domain_information = slack_wrapper.find_auth_information(workspace_domain) + domain_information = watchman_processor.find_auth_information(workspace_domain) if domain_information: OUTPUT_LOGGER.log('WORKSPACE_PROBE', domain_information, detect_type='Workspace Probe', notify_type='workspace_probe') @@ -176,7 +177,7 @@ def unauthenticated_probe(workspace_domain: str, sys.exit(1) -def init_logger(logging_type: str, debug: bool) -> sw_logger.JSONLogger or sw_logger.StdoutLogger: +def init_logger(logging_type: str, debug: bool) -> JSONLogger | StdoutLogger: """ Create a logger object. 
Defaults to stdout if no option is given Args: @@ -187,9 +188,9 @@ def init_logger(logging_type: str, debug: bool) -> sw_logger.JSONLogger or sw_lo """ if not logging_type or logging_type == 'stdout': - return sw_logger.StdoutLogger(debug=debug) + return StdoutLogger(debug=debug) else: - return sw_logger.JSONLogger(debug=debug) + return JSONLogger(debug=debug) def main(): @@ -270,7 +271,7 @@ def main(): conf_path = f'{os.path.expanduser("~")}/watchman.conf' validate_conf(conf_path, cookie) - slack_con = slack_wrapper.initiate_slack_connection(cookie) + slack_con = watchman_processor.initiate_slack_connection(cookie) auth_data = slack_con.get_auth_test() calling_user = user.create_from_dict( @@ -295,7 +296,7 @@ def main(): OUTPUT_LOGGER.log('USER', calling_user, detect_type='User', notify_type='user') OUTPUT_LOGGER.log('WORKSPACE', workspace_information, detect_type='Workspace', notify_type='workspace') OUTPUT_LOGGER.log('INFO', 'Finding workspace authentication options') - workspace_auth = slack_wrapper.find_auth_information(domain_url=workspace_information.url) + workspace_auth = watchman_processor.find_auth_information(domain_url=workspace_information.url) if workspace_auth: OUTPUT_LOGGER.log('WORKSPACE_AUTH', workspace_auth, detect_type='Workspace Auth', notify_type='workspace_auth') @@ -304,20 +305,20 @@ def main(): if users: OUTPUT_LOGGER.log('INFO', 'Enumerating users...') - user_list = slack_wrapper.get_users(slack_con, verbose) + user_list = watchman_processor.get_users(slack_con, verbose) OUTPUT_LOGGER.log('SUCCESS', f'{len(user_list)} users discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - sw_logger.export_csv('slack_users', user_list) + loggers.export_csv('slack_users', user_list) OUTPUT_LOGGER.log( 'SUCCESS', f'Users output to CSV file: {os.path.join(os.getcwd(), "slack_users.csv")}') if channels: OUTPUT_LOGGER.log('INFO', 'Enumerating channels...') - channel_list = slack_wrapper.get_channels(slack_con, verbose) + channel_list = watchman_processor.get_channels(slack_con, verbose) OUTPUT_LOGGER.log('SUCCESS', f'{len(channel_list)} channels discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - sw_logger.export_csv('slack_channels', channel_list) + loggers.export_csv('slack_channels', channel_list) OUTPUT_LOGGER.log( 'SUCCESS', f'Users output to CSV file: {os.path.join(os.getcwd(), "slack_channels.csv")}') diff --git a/src/slack_watchman/clients/__init__.py b/src/slack_watchman/clients/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/slack_watchman/clients/slack_client.py b/src/slack_watchman/clients/slack_client.py new file mode 100644 index 0000000..22df0d8 --- /dev/null +++ b/src/slack_watchman/clients/slack_client.py @@ -0,0 +1,228 @@ +import json +import re +import requests +import time +import urllib.parse +from typing import List, Dict + +from requests.exceptions import HTTPError +from urllib3.util import Retry +from requests.adapters import HTTPAdapter + +from slack_watchman import exceptions + + +class SlackClient(object): + + def __init__(self, + token: str = None, + cookie: str = None, + url: str = None): + self.token = token + self.session_token = None + self.url = url + self.base_url = 'https://slack.com/api' + self.count = 100 + self.limit = 100 + self.pretty = 1 + self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5)\ + AppleWebKit/537.36 (KHTML, like Gecko) Cafari/537.36' + if cookie: + self.cookie_dict = { + 'd': urllib.parse.quote(urllib.parse.unquote(cookie)) + } + else: + self.cookie_dict = {} + + self.session 
= session = requests.session() + session.mount( + self.base_url, + HTTPAdapter( + pool_connections=10, + pool_maxsize=10, + max_retries=Retry(total=5, backoff_factor=0.2))) + + if self.token: + session.headers.update({ + 'Connection': 'keep-alive, close', + 'Authorization': f'Bearer {self.token}', + 'User-Agent': self.user_agent + }) + else: + self.session_token = self._get_session_token() + session.headers.update({ + 'Connection': 'keep-alive, close', + 'Authorization': f'Bearer {self.session_token}', + 'User-Agent': self.user_agent + }) + + def _get_session_token(self) -> str: + + r = requests.get(self.url, cookies=self.cookie_dict).text + regex = '(xox[a-zA-Z]-[a-zA-Z0-9-]+)' + + try: + return re.search(regex, r)[0] + except TypeError: + raise exceptions.InvalidCookieError(self.url) + except: + raise + + def _make_request(self, url, params=None, data=None, method='GET', verify_ssl=True): + try: + relative_url = '/'.join((self.base_url, url)) + response = self.session.request( + method, + relative_url, + params=params, + data=data, + cookies=self.cookie_dict, + verify=verify_ssl, + timeout=30) + response.raise_for_status() + + if not response.json().get('ok') and response.json().get('error') == 'missing_scope': + raise exceptions.SlackScopeError(response.json().get('needed')) + elif not response.json().get('ok'): + raise exceptions.SlackAPIError(response.json().get('error')) + else: + return response + + except HTTPError as http_error: + if response.status_code == 429: + print('WARNING', 'Slack API rate limit reached - cooling off') + time.sleep(90) + return self.session.request( + method, + relative_url, + params=params, + data=data, + cookies=self.cookie_dict, + verify=verify_ssl, + timeout=30) + else: + raise HTTPError(f'HTTPError: {http_error}') + except: + raise + + def _get_pages(self, url, scope, params): + first_page = self._make_request(url, params).json() + yield first_page + num_pages = first_page.get(scope).get('pagination').get('page_count') + + for page in range(2, num_pages + 1): + params['page'] = str(page) + next_page = self._make_request(url, params=params).json() + yield next_page + + def page_api_search(self, + query: str, + url: str, + scope: str, + timeframe: str or int) -> List[Dict]: + """ Wrapper for Slack API methods that use page number based pagination + + Args: + query: Search to carry out in Slack API + url: API endpoint to use + scope: What to search for, e.g. files or messages + timeframe: How far back to search + Returns: + A list of dict objects with responses + """ + + results = [] + params = { + 'query': f'after:{timeframe} {query}', + 'pretty': self.pretty, + 'count': self.count + } + + for page in self._get_pages(url, scope, params): + for value in page.get(scope).get('matches'): + results.append(value) + + return results + + def cursor_api_search(self, url: str, scope: str) -> List[Dict]: + """ Wrapper for Slack API methods that use cursor based pagination + + Args: + url: API endpoint to use + scope: What to search for, e.g. 
files or messages + Returns: + A list of dict objects with responses + """ + + results = [] + params = { + 'pretty': self.pretty, + 'limit': self.limit, + 'cursor': '' + } + + r = self._make_request(url, params=params).json() + for value in r.get(scope): + results.append(value) + + if str(r.get('ok')) == 'False': + raise exceptions.SlackAPIError(r.get('error')) + else: + cursor = r.get('response_metadata').get('next_cursor') + while str(r.get('ok')) == 'True' and cursor: + params['limit'], params['cursor'] = 200, cursor + r = self._make_request(url, params=params).json() + for value in r.get(scope): + cursor = r.get('response_metadata').get('next_cursor') + results.append(value) + + return results + + def get_user_info(self, user_id: str) -> json: + """ Get the user for the given ID + + Args: + user_id: ID of the user to return + Returns: + JSON object with user information + """ + + params = { + 'user': user_id + } + + return self._make_request('users.info', params=params).json() + + def get_conversation_info(self, conversation_id: str) -> json: + """ Get the conversation for the given ID + + Args: + conversation_id: ID of the conversation to return + Returns: + JSON object with conversation information + """ + + params = { + 'channel': conversation_id + } + + return self._make_request('conversations.info', params=params).json() + + def get_workspace_info(self) -> str or None: + """ Returns the information of the workspace the token is associated with + + Returns: + JSON object with workspace information + """ + + return self._make_request('team.info').json() + + def get_auth_test(self) -> str or None: + """ Carries out an auth test against the calling token, and replies with + user information + + Returns: + JSON object with auth test response + """ + + return self._make_request('auth.test').json() \ No newline at end of file diff --git a/src/slack_watchman/sw_logger.py b/src/slack_watchman/loggers.py similarity index 100% rename from src/slack_watchman/sw_logger.py rename to src/slack_watchman/loggers.py diff --git a/src/slack_watchman/signature_downloader.py b/src/slack_watchman/signature_downloader.py index e77f973..f0acc8a 100644 --- a/src/slack_watchman/signature_downloader.py +++ b/src/slack_watchman/signature_downloader.py @@ -8,7 +8,7 @@ import yaml -from slack_watchman.sw_logger import JSONLogger, StdoutLogger +from slack_watchman.loggers import JSONLogger, StdoutLogger from slack_watchman.models.signature import Signature, TestCases SIGNATURE_URL = 'https://github.com/PaperMtn/watchman-signatures/archive/main.zip' diff --git a/src/slack_watchman/slack_wrapper.py b/src/slack_watchman/watchman_processor.py similarity index 60% rename from src/slack_watchman/slack_wrapper.py rename to src/slack_watchman/watchman_processor.py index a373872..8e40199 100644 --- a/src/slack_watchman/slack_wrapper.py +++ b/src/slack_watchman/watchman_processor.py @@ -4,257 +4,21 @@ import os import re import requests -import time import dataclasses import yaml -import urllib.parse from typing import List, Dict -from requests.exceptions import HTTPError -from urllib3.util import Retry -from requests.adapters import HTTPAdapter from bs4 import BeautifulSoup -from slack_watchman import sw_logger, exceptions +from slack_watchman import exceptions +from slack_watchman.loggers import StdoutLogger, JSONLogger from slack_watchman.models import ( signature, user, post, conversation ) - - -class SlackAPI(object): - - def __init__(self, - token: str = None, - cookie: str = None, - url: str = None): - self.token = 
token - self.session_token = None - self.url = url - self.base_url = 'https://slack.com/api' - self.count = 100 - self.limit = 100 - self.pretty = 1 - self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5)\ - AppleWebKit/537.36 (KHTML, like Gecko) Cafari/537.36' - if cookie: - self.cookie_dict = { - 'd': urllib.parse.quote(urllib.parse.unquote(cookie)) - } - else: - self.cookie_dict = {} - - self.session = session = requests.session() - session.mount( - self.base_url, - HTTPAdapter( - pool_connections=10, - pool_maxsize=10, - max_retries=Retry(total=5, backoff_factor=0.2))) - - if self.token: - session.headers.update({ - 'Connection': 'keep-alive, close', - 'Authorization': f'Bearer {self.token}', - 'User-Agent': self.user_agent - }) - else: - self.session_token = self._get_session_token() - session.headers.update({ - 'Connection': 'keep-alive, close', - 'Authorization': f'Bearer {self.session_token}', - 'User-Agent': self.user_agent - }) - - def _get_session_token(self) -> str: - - r = requests.get(self.url, cookies=self.cookie_dict).text - regex = '(xox[a-zA-Z]-[a-zA-Z0-9-]+)' - - try: - return re.search(regex, r)[0] - except TypeError: - raise exceptions.InvalidCookieError(self.url) - except: - raise - - def _make_request(self, url, params=None, data=None, method='GET', verify_ssl=True): - try: - relative_url = '/'.join((self.base_url, url)) - response = self.session.request( - method, - relative_url, - params=params, - data=data, - cookies=self.cookie_dict, - verify=verify_ssl, - timeout=30) - response.raise_for_status() - - if not response.json().get('ok') and response.json().get('error') == 'missing_scope': - raise exceptions.SlackScopeError(response.json().get('needed')) - elif not response.json().get('ok'): - raise exceptions.SlackAPIError(response.json().get('error')) - else: - return response - - except HTTPError as http_error: - if response.status_code == 429: - print('WARNING', 'Slack API rate limit reached - cooling off') - time.sleep(90) - return self.session.request( - method, - relative_url, - params=params, - data=data, - cookies=self.cookie_dict, - verify=verify_ssl, - timeout=30) - else: - raise HTTPError(f'HTTPError: {http_error}') - except: - raise - - def _get_pages(self, url, scope, params): - first_page = self._make_request(url, params).json() - yield first_page - num_pages = first_page.get(scope).get('pagination').get('page_count') - - for page in range(2, num_pages + 1): - params['page'] = str(page) - next_page = self._make_request(url, params=params).json() - yield next_page - - def page_api_search(self, - query: str, - url: str, - scope: str, - timeframe: str or int) -> List[Dict]: - """ Wrapper for Slack API methods that use page number based pagination - - Args: - query: Search to carry out in Slack API - url: API endpoint to use - scope: What to search for, e.g. files or messages - timeframe: How far back to search - Returns: - A list of dict objects with responses - """ - - results = [] - params = { - 'query': f'after:{timeframe} {query}', - 'pretty': self.pretty, - 'count': self.count - } - - for page in self._get_pages(url, scope, params): - for value in page.get(scope).get('matches'): - results.append(value) - - return results - - def cursor_api_search(self, url: str, scope: str) -> List[Dict]: - """ Wrapper for Slack API methods that use cursor based pagination - - Args: - url: API endpoint to use - scope: What to search for, e.g. 
files or messages - Returns: - A list of dict objects with responses - """ - - results = [] - params = { - 'pretty': self.pretty, - 'limit': self.limit, - 'cursor': '' - } - - r = self._make_request(url, params=params).json() - for value in r.get(scope): - results.append(value) - - if str(r.get('ok')) == 'False': - raise exceptions.SlackAPIError(r.get('error')) - else: - cursor = r.get('response_metadata').get('next_cursor') - while str(r.get('ok')) == 'True' and cursor: - params['limit'], params['cursor'] = 200, cursor - r = self._make_request(url, params=params).json() - for value in r.get(scope): - cursor = r.get('response_metadata').get('next_cursor') - results.append(value) - - return results - - def get_user_info(self, user_id: str) -> json: - """ Get the user for the given ID - - Args: - user_id: ID of the user to return - Returns: - JSON object with user information - """ - - params = { - 'user': user_id - } - - return self._make_request('users.info', params=params).json() - - def get_conversation_info(self, conversation_id: str) -> json: - """ Get the conversation for the given ID - - Args: - conversation_id: ID of the conversation to return - Returns: - JSON object with conversation information - """ - - params = { - 'channel': conversation_id - } - - return self._make_request('conversations.info', params=params).json() - - def get_workspace_info(self) -> str or None: - """ Returns the information of the workspace the token is associated with - - Returns: - JSON object with workspace information - """ - - return self._make_request('team.info').json() - - def get_auth_test(self) -> str or None: - """ Carries out an auth test against the calling token, and replies with - user information - - Returns: - JSON object with auth test response - """ - - return self._make_request('auth.test').json() - - -def _convert_timestamp(timestamp: int) -> str: - """ Converts epoch timestamp into human-readable time - - Args: - timestamp: Epoch formatted timestamp - Returns: - String time in the format %Y-%m-%d %H:%M:%S - """ - - if isinstance(timestamp, str): - timestamp = timestamp.split('.', 1)[0] - - output = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) - - return output +from slack_watchman.clients.slack_client import SlackClient def _deduplicate(input_list: list) -> List[Dict]: @@ -279,7 +43,7 @@ def default(self, o): return {match.get('watchman_id'): match for match in reversed(deduped_list)}.values() -def initiate_slack_connection(cookie: bool) -> SlackAPI: +def initiate_slack_connection(cookie: bool) -> SlackClient: """ Create a Slack API object to use for interacting with the Slack API First tries to get the API token from the environment variable(s): SLACK_WATCHMAN_TOKEN @@ -302,7 +66,7 @@ def initiate_slack_connection(cookie: bool) -> SlackAPI: token = config['slack_watchman']['token'] except: raise exceptions.MissingConfigVariable('token') - return SlackAPI(token=token) + return SlackClient(token=token) else: try: cookie = os.environ['SLACK_WATCHMAN_COOKIE'] @@ -318,10 +82,10 @@ def initiate_slack_connection(cookie: bool) -> SlackAPI: url = config['slack_watchman']['url'] except: raise exceptions.MissingConfigVariable('url') - return SlackAPI(cookie=cookie, url=url) + return SlackClient(cookie=cookie, url=url) -def get_users(slack: SlackAPI, verbose: bool) -> List[user.User]: +def get_users(slack: SlackClient, verbose: bool) -> List[user.User]: """ Return a list of all active users in the instance Args: @@ -340,7 +104,7 @@ def get_users(slack: SlackAPI, verbose: 
bool) -> List[user.User]: return results -def get_channels(slack: SlackAPI, +def get_channels(slack: SlackClient, verbose: bool) -> List[conversation.Conversation] or List[conversation.ConversationSuccinct]: """ Return a list of all channels in the instance @@ -355,8 +119,8 @@ def get_channels(slack: SlackAPI, return [conversation.create_from_dict(item, verbose) for item in conversations] -def find_messages(slack: SlackAPI, - logger: sw_logger.JSONLogger, +def find_messages(slack: SlackClient, + logger: JSONLogger | StdoutLogger, sig: signature.Signature, verbose: bool, timeframe: str) -> List[Dict]: @@ -414,7 +178,7 @@ def find_messages(slack: SlackAPI, logger.log('CRITICAL', e) -def _multipro_message_worker(slack: SlackAPI, +def _multipro_message_worker(slack: SlackClient, sig: signature.Signature, query: str, verbose: bool, @@ -454,8 +218,8 @@ def _multipro_message_worker(slack: SlackAPI, return kwargs.get('results'), kwargs.get('potential_matches') -def find_files(slack: SlackAPI, - logger: sw_logger.JSONLogger, +def find_files(slack: SlackClient, + logger: JSONLogger | StdoutLogger, sig: signature.Signature, verbose: bool, timeframe: str) -> List[Dict]: @@ -513,7 +277,7 @@ def find_files(slack: SlackAPI, logger.log('CRITICAL', e) -def _multipro_file_worker(slack: SlackAPI, +def _multipro_file_worker(slack: SlackClient, sig: signature.Signature, query: str, verbose: bool, From 5dde00a3995242b3a71dcdb962a2558c7cbbf898 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Mon, 21 Oct 2024 13:48:02 +0100 Subject: [PATCH 02/18] Added utils module and updated modules to use the new util functions --- src/slack_watchman/models/conversation.py | 23 ++---------- src/slack_watchman/models/post.py | 24 ++---------- src/slack_watchman/models/user.py | 21 +---------- src/slack_watchman/models/workspace.py | 19 ---------- src/slack_watchman/utils.py | 44 ++++++++++++++++++++++ src/slack_watchman/watchman_processor.py | 45 ++++++++++++----------- 6 files changed, 75 insertions(+), 101 deletions(-) create mode 100644 src/slack_watchman/utils.py diff --git a/src/slack_watchman/models/conversation.py b/src/slack_watchman/models/conversation.py index 733e241..71d4936 100644 --- a/src/slack_watchman/models/conversation.py +++ b/src/slack_watchman/models/conversation.py @@ -1,24 +1,7 @@ -import time from dataclasses import dataclass from typing import List, Dict, Optional - -def _convert_timestamp(timestamp: str or int) -> str or None: - """ Converts epoch timestamp into human-readable time - - Args: - timestamp: epoch timestamp in seconds - Returns: - String time in the format YYYY-mm-dd hh:mm:ss - """ - - if timestamp: - if isinstance(timestamp, str): - timestamp = timestamp.split('.', 1)[0] - - return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) - else: - return None +from slack_watchman.utils import convert_timestamp @dataclass(slots=True) @@ -93,7 +76,7 @@ def create_from_dict(conv_dict: Dict, verbose: bool) -> Conversation or Conversa return Conversation( id=conv_dict.get('id'), name=conv_dict.get('name'), - created=_convert_timestamp(conv_dict.get('created')), + created=convert_timestamp(conv_dict.get('created')), num_members=conv_dict.get('num_members'), is_general=conv_dict.get('is_general'), is_private=conv_dict.get('is_private'), @@ -119,7 +102,7 @@ def create_from_dict(conv_dict: Dict, verbose: bool) -> Conversation or Conversa return ConversationSuccinct( id=conv_dict.get('id'), name=conv_dict.get('name'), - created=_convert_timestamp(conv_dict.get('created')), + 
created=convert_timestamp(conv_dict.get('created')), num_members=conv_dict.get('num_members'), is_private=conv_dict.get('is_private'), is_im=conv_dict.get('is_im'), diff --git a/src/slack_watchman/models/post.py b/src/slack_watchman/models/post.py index 5cf6653..cbe6f87 100644 --- a/src/slack_watchman/models/post.py +++ b/src/slack_watchman/models/post.py @@ -1,26 +1,8 @@ -import time from dataclasses import dataclass from typing import List, Dict from slack_watchman.models import conversation, user - - -def _convert_timestamp(timestamp: str or int) -> str or None: - """ Converts epoch timestamp into human-readable time - - Args: - timestamp: epoch timestamp in seconds - Returns: - String time in the format YYYY-mm-dd hh:mm:ss - """ - - if timestamp: - if isinstance(timestamp, str): - timestamp = timestamp.split('.', 1)[0] - - return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) - else: - return None +from slack_watchman.utils import convert_timestamp @dataclass(slots=True) @@ -73,7 +55,7 @@ def create_message_from_dict(message_dict: Dict) -> Message: return Message( id=message_dict.get('iid'), team=message_dict.get('team'), - created=_convert_timestamp(message_dict.get('ts')), + created=convert_timestamp(message_dict.get('ts')), timestamp=message_dict.get('ts'), conversation=message_dict.get('conversation'), user=message_dict.get('user'), @@ -97,7 +79,7 @@ def create_file_from_dict(file_dict: Dict) -> File: return File( id=file_dict.get('id'), team=file_dict.get('source_team'), - created=_convert_timestamp(file_dict.get('created')), + created=convert_timestamp(file_dict.get('created')), user=file_dict.get('user'), name=file_dict.get('name'), title=file_dict.get('title'), diff --git a/src/slack_watchman/models/user.py b/src/slack_watchman/models/user.py index a3e3dba..5dfbd54 100644 --- a/src/slack_watchman/models/user.py +++ b/src/slack_watchman/models/user.py @@ -1,24 +1,7 @@ -import time from dataclasses import dataclass from typing import Dict - -def _convert_timestamp(timestamp: str or int) -> str or None: - """ Converts epoch timestamp into human-readable time - - Args: - timestamp: epoch timestamp in seconds - Returns: - String time in the format YYYY-mm-dd hh:mm:ss - """ - - if timestamp: - if isinstance(timestamp, str): - timestamp = timestamp.split('.', 1)[0] - - return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) - else: - return None +from slack_watchman.utils import convert_timestamp @dataclass(slots=True) @@ -95,7 +78,7 @@ def create_from_dict(user_dict: Dict, is_restricted=user_dict.get('is_restricted'), is_ultra_restricted=user_dict.get('is_ultra_restricted'), is_bot=user_dict.get('is_bot'), - updated=_convert_timestamp(user_dict.get('updated')), + updated=convert_timestamp(user_dict.get('updated')), has_2fa=user_dict.get('has_2fa') ) else: diff --git a/src/slack_watchman/models/workspace.py b/src/slack_watchman/models/workspace.py index e490271..e2b4586 100644 --- a/src/slack_watchman/models/workspace.py +++ b/src/slack_watchman/models/workspace.py @@ -1,26 +1,7 @@ -import time from dataclasses import dataclass from typing import Optional, Dict -def _convert_timestamp(timestamp: str or int) -> str or None: - """ Converts epoch timestamp into human-readable time - - Args: - timestamp: epoch timestamp in seconds - Returns: - String time in the format YYYY-mm-dd hh:mm:ss - """ - - if timestamp: - if isinstance(timestamp, str): - timestamp = timestamp.split('.', 1)[0] - - return time.strftime('%Y-%m-%d %H:%M:%S', 
time.localtime(int(timestamp))) - else: - return None - - @dataclass(slots=True) class Workspace(object): """ Class that defines Workspaces objects. Workspaces are collections diff --git a/src/slack_watchman/utils.py b/src/slack_watchman/utils.py new file mode 100644 index 0000000..55fa675 --- /dev/null +++ b/src/slack_watchman/utils.py @@ -0,0 +1,44 @@ +import time +import json +import dataclasses +from typing import List, Dict + + +def convert_timestamp(timestamp: str or int) -> str or None: + """ Converts epoch timestamp into human-readable time + + Args: + timestamp: epoch timestamp in seconds + Returns: + String time in the format YYYY-mm-dd hh:mm:ss + """ + + if timestamp: + if isinstance(timestamp, str): + timestamp = timestamp.split('.', 1)[0] + + return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) + else: + return None + + +def deduplicate_dataclass(input_list: list) -> List[Dict]: + """ Removes duplicates where results are returned by multiple queries + Nested class handles JSON encoding for dataclass objects + + Args: + input_list: List of dataclass objects + Returns: + List of JSON objects with duplicates removed + """ + + class EnhancedJSONEncoder(json.JSONEncoder): + def default(self, o): + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + return super().default(o) + + json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in input_list} + + deduped_list = [json.loads(t) for t in json_set] + return {match.get('watchman_id'): match for match in reversed(deduped_list)}.values() \ No newline at end of file diff --git a/src/slack_watchman/watchman_processor.py b/src/slack_watchman/watchman_processor.py index 8e40199..0354e58 100644 --- a/src/slack_watchman/watchman_processor.py +++ b/src/slack_watchman/watchman_processor.py @@ -11,6 +11,7 @@ from bs4 import BeautifulSoup from slack_watchman import exceptions +from slack_watchman.utils import deduplicate_dataclass from slack_watchman.loggers import StdoutLogger, JSONLogger from slack_watchman.models import ( signature, @@ -21,26 +22,26 @@ from slack_watchman.clients.slack_client import SlackClient -def _deduplicate(input_list: list) -> List[Dict]: - """ Removes duplicates where results are returned by multiple queries - Nested class handles JSON encoding for dataclass objects - - Args: - input_list: List of dataclass objects - Returns: - List of JSON objects with duplicates removed - """ - - class EnhancedJSONEncoder(json.JSONEncoder): - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) - - json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in input_list} - - deduped_list = [json.loads(t) for t in json_set] - return {match.get('watchman_id'): match for match in reversed(deduped_list)}.values() +# def _deduplicate(input_list: list) -> List[Dict]: +# """ Removes duplicates where results are returned by multiple queries +# Nested class handles JSON encoding for dataclass objects +# +# Args: +# input_list: List of dataclass objects +# Returns: +# List of JSON objects with duplicates removed +# """ +# +# class EnhancedJSONEncoder(json.JSONEncoder): +# def default(self, o): +# if dataclasses.is_dataclass(o): +# return dataclasses.asdict(o) +# return super().default(o) +# +# json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in input_list} +# +# deduped_list = [json.loads(t) for t in json_set] +# return 
{match.get('watchman_id'): match for match in reversed(deduped_list)}.values() def initiate_slack_connection(cookie: bool) -> SlackClient: @@ -168,7 +169,7 @@ def find_messages(slack: SlackClient, logger.log('INFO', f'{sum(potential_matches)} potential matches found') if results: - results = _deduplicate(results) + results = deduplicate_dataclass(results) logger.log('SUCCESS', f'{len(results)} total matches found after filtering') return results else: @@ -267,7 +268,7 @@ def find_files(slack: SlackClient, logger.log('INFO', f'{sum(potential_matches)} potential matches found') if results: - results = _deduplicate(results) + results = deduplicate_dataclass(results) logger.log('SUCCESS', f'{len(results)} total files found after filtering') return results else: From ba53cb9ae1c21ce84ba03eb2f60d7bcff38865d6 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Tue, 22 Oct 2024 09:52:27 +0100 Subject: [PATCH 03/18] Broken down and clarified the deduplication function --- src/slack_watchman/loggers.py | 9 ++--- src/slack_watchman/utils.py | 46 ++++++++++++++++-------- src/slack_watchman/watchman_processor.py | 28 ++------------- 3 files changed, 36 insertions(+), 47 deletions(-) diff --git a/src/slack_watchman/loggers.py b/src/slack_watchman/loggers.py index c97da4f..c7446d1 100644 --- a/src/slack_watchman/loggers.py +++ b/src/slack_watchman/loggers.py @@ -12,6 +12,8 @@ from typing import Any, Dict, List, ClassVar, Protocol from colorama import Fore, Back, Style, init +from slack_watchman.utils import EnhancedJSONEncoder + class StdoutLogger: def __init__(self, **kwargs): @@ -238,13 +240,6 @@ def print_header(self) -> None: print(' '.ljust(79) + Fore.GREEN) -class EnhancedJSONEncoder(json.JSONEncoder): - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) - - class JSONLogger(Logger): def __init__(self, name: str = 'Slack Watchman', **kwargs): super().__init__(name) diff --git a/src/slack_watchman/utils.py b/src/slack_watchman/utils.py index 55fa675..7a07b39 100644 --- a/src/slack_watchman/utils.py +++ b/src/slack_watchman/utils.py @@ -1,7 +1,14 @@ import time import json import dataclasses -from typing import List, Dict +from typing import List, Dict, Any + + +class EnhancedJSONEncoder(json.JSONEncoder): + def default(self, o): + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + return super().default(o) def convert_timestamp(timestamp: str or int) -> str or None: @@ -17,28 +24,37 @@ def convert_timestamp(timestamp: str or int) -> str or None: if isinstance(timestamp, str): timestamp = timestamp.split('.', 1)[0] - return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(timestamp))) + return time.strftime('%Y-%m-%d %H:%M:%S %Z', time.gmtime(int(timestamp))) else: return None -def deduplicate_dataclass(input_list: list) -> List[Dict]: - """ Removes duplicates where results are returned by multiple queries - Nested class handles JSON encoding for dataclass objects +def convert_to_dict(obj: Any) -> Dict: + """ Returns a dictionary object from a dataclass object or a dict + containing nested dataclass objects. 
Args: - input_list: List of dataclass objects + obj: dataclass object or dict Returns: - List of JSON objects with duplicates removed + Dictionary object """ - class EnhancedJSONEncoder(json.JSONEncoder): - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) + json_object = json.dumps(obj, sort_keys=True, cls=EnhancedJSONEncoder) + return json.loads(json_object) + + +def deduplicate_results(input_list: List[Any]) -> List[Dict]: + """ Removes duplicates where results are returned by multiple queries. This is done + using the `watchman_id` field in the detection data to identify the same findings. - json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in input_list} + The `watchman_id` is a hash that is generated for each finding from the match string and the + timestamp, meaning the same message won't be returned multiple times. + + Args: + input_list: List of dataclass objects + Returns: + List of JSON objects with duplicates removed + """ - deduped_list = [json.loads(t) for t in json_set] - return {match.get('watchman_id'): match for match in reversed(deduped_list)}.values() \ No newline at end of file + converted_dict_list = [convert_to_dict(t) for t in input_list] + return list({match.get('watchman_id'): match for match in reversed(converted_dict_list)}.values()) diff --git a/src/slack_watchman/watchman_processor.py b/src/slack_watchman/watchman_processor.py index 0354e58..40538cf 100644 --- a/src/slack_watchman/watchman_processor.py +++ b/src/slack_watchman/watchman_processor.py @@ -11,7 +11,7 @@ from bs4 import BeautifulSoup from slack_watchman import exceptions -from slack_watchman.utils import deduplicate_dataclass +from slack_watchman.utils import deduplicate_results from slack_watchman.loggers import StdoutLogger, JSONLogger from slack_watchman.models import ( signature, @@ -22,28 +22,6 @@ from slack_watchman.clients.slack_client import SlackClient -# def _deduplicate(input_list: list) -> List[Dict]: -# """ Removes duplicates where results are returned by multiple queries -# Nested class handles JSON encoding for dataclass objects -# -# Args: -# input_list: List of dataclass objects -# Returns: -# List of JSON objects with duplicates removed -# """ -# -# class EnhancedJSONEncoder(json.JSONEncoder): -# def default(self, o): -# if dataclasses.is_dataclass(o): -# return dataclasses.asdict(o) -# return super().default(o) -# -# json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in input_list} -# -# deduped_list = [json.loads(t) for t in json_set] -# return {match.get('watchman_id'): match for match in reversed(deduped_list)}.values() - - def initiate_slack_connection(cookie: bool) -> SlackClient: """ Create a Slack API object to use for interacting with the Slack API First tries to get the API token from the environment variable(s): @@ -169,7 +147,7 @@ def find_messages(slack: SlackClient, logger.log('INFO', f'{sum(potential_matches)} potential matches found') if results: - results = deduplicate_dataclass(results) + results = deduplicate_results(results) logger.log('SUCCESS', f'{len(results)} total matches found after filtering') return results else: @@ -268,7 +246,7 @@ def find_files(slack: SlackClient, logger.log('INFO', f'{sum(potential_matches)} potential matches found') if results: - results = deduplicate_dataclass(results) + results = deduplicate_results(results) logger.log('SUCCESS', f'{len(results)} total files found after filtering') 
return results else: From b391472009c99177724f3b542c1854189221ddf4 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Tue, 22 Oct 2024 09:55:58 +0100 Subject: [PATCH 04/18] Unit tests for the `utils` package --- tests/unit/test_unit_utils.py | 123 ++++++++++++++++++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 tests/unit/test_unit_utils.py diff --git a/tests/unit/test_unit_utils.py b/tests/unit/test_unit_utils.py new file mode 100644 index 0000000..5cc724d --- /dev/null +++ b/tests/unit/test_unit_utils.py @@ -0,0 +1,123 @@ +from dataclasses import dataclass +from typing import Dict, Any + +import pytest + +from slack_watchman.utils import ( + convert_timestamp, + convert_to_dict, + deduplicate_results +) + + +def test_convert_timestamp(): + # Test with integer timestamp + timestamp = 1704067200 + expected_output = "2024-01-01 00:00:00 UTC" + assert convert_timestamp(timestamp) == expected_output + + # Test with string timestamp + timestamp = "1704067200.000" + expected_output = "2024-01-01 00:00:00 UTC" + assert convert_timestamp(timestamp) == expected_output + + # Test with None input + timestamp = None + expected_output = None + assert convert_timestamp(timestamp) == expected_output + + +def test_convert_timestamp_edge_cases(): + # Test with very large timestamp + timestamp = 2 ** 31 - 1 + expected_output = "2038-01-19 03:14:07 UTC" + assert convert_timestamp(timestamp) == expected_output + + # Test with very small timestamp + timestamp = 1 + expected_output = "1970-01-01 00:00:01 UTC" + assert convert_timestamp(timestamp) == expected_output + + +@dataclass +class TestClass: + __test__ = False + name: str + age: int + + +@pytest.fixture +def simple_example_result() -> Dict[Any, Any]: + return { + "file": { + "created": "2024-01-01 00:00:00 UTC", + "editable": False, + "user": "UABC123" + }, + "user": { + "name": "Joe Bloggs", + "age": 30, + }, + "watchman_id": "abc123" + } + + +@pytest.fixture +def dataclass_example_result_one() -> Dict[Any, Any]: + return { + "file": { + "created": "2024-01-01 00:00:00 UTC", + "editable": False, + "user": "UABC123" + }, + "user": TestClass(name='Joe Bloggs', age=30), + "watchman_id": "abc123" + } + + +@pytest.fixture +def dataclass_example_result_two() -> Dict[Any, Any]: + return { + "match_string": "2840631", + "message": { + "created": "2024-01-01 00:00:00 UTC", + "id": "abcdefghijklmnopqrstuvwxyz", + "permalink": "https://example.com", + "text": "This is a message", + "timestamp": "1729257170.452549", + "type": "message", + "user": TestClass(name='John Smith', age=30) + }, + "watchman_id": "abc1234" + } + + +def test_convert_to_dict(simple_example_result: Dict[Any, Any], + dataclass_example_result_one: Dict[Any, Any]) -> None: + # Test with simple example + assert convert_to_dict(simple_example_result) == simple_example_result + + # Test with dataclass example + assert convert_to_dict(dataclass_example_result_one) == simple_example_result + + +def test_deduplicate_results(simple_example_result: Dict[Any, Any], + dataclass_example_result_one: Dict[Any, Any], + dataclass_example_result_two: Dict[Any, Any]) -> None: + # Test with a single result + assert deduplicate_results([simple_example_result]) == [simple_example_result] + + # Test with multiple results containing duplicates + assert deduplicate_results([simple_example_result, simple_example_result]) == [ + simple_example_result] + + # Test with dataclass example + assert deduplicate_results([dataclass_example_result_one]) == [convert_to_dict(dataclass_example_result_one)] + + # 
Test with multiple dataclass examples with no duplicates + assert deduplicate_results([dataclass_example_result_one, dataclass_example_result_two]) == [ + convert_to_dict(dataclass_example_result_two), convert_to_dict(dataclass_example_result_one)] + + # Test with multiple dataclass examples with duplicates + assert (deduplicate_results([dataclass_example_result_one, dataclass_example_result_one]) == + [convert_to_dict(dataclass_example_result_one)]) From ba500751a02d5b9ec6cdca632f3db8bfa6683a8f Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 20:38:50 +0100 Subject: [PATCH 05/18] Variable validation and fixes based on testing --- src/slack_watchman/models/conversation.py | 74 +++++++++++++++++++ src/slack_watchman/models/post.py | 85 ++++++++++++++++++++-- src/slack_watchman/models/signature.py | 82 +++++++++++++++++++-- src/slack_watchman/models/user.py | 84 ++++++++++++++++++--- src/slack_watchman/models/workspace.py | 12 +++ src/slack_watchman/signature_downloader.py | 24 +----- 6 files changed, 319 insertions(+), 42 deletions(-) diff --git a/src/slack_watchman/models/conversation.py b/src/slack_watchman/models/conversation.py index 71d4936..6274c5c 100644 --- a/src/slack_watchman/models/conversation.py +++ b/src/slack_watchman/models/conversation.py @@ -38,6 +38,54 @@ class Conversation(object): is_group: bool is_channel: bool + def __post_init__(self): + if self.id and not isinstance(self.id, str): + raise TypeError(f'Expected `id` to be of type str, received {type(self.name).__name__}') + if self.name and not isinstance(self.name, str): + raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}') + if self.created and not (isinstance(self.created, str) or isinstance(self.created, int) or isinstance(self.created, float)): + raise TypeError(f'Expected `created` to be of type str or int or float, received {type(self.name).__name__}') + if self.is_private and not isinstance(self.is_private, bool): + raise TypeError(f'Expected `is_private` to be of type bool, received {type(self.name).__name__}') + if self.is_im and not isinstance(self.is_im, bool): + raise TypeError(f'Expected `is_im` to be of type bool, received {type(self.name).__name__}') + if self.is_mpim and not isinstance(self.is_mpim, bool): + raise TypeError(f'Expected `is_mpim` to be of type bool, received {type(self.name).__name__}') + if self.is_archived and not isinstance(self.is_archived, bool): + raise TypeError(f'Expected `is_archived` to be of type bool, received {type(self.name).__name__}') + if self.is_general and not isinstance(self.is_general, bool): + raise TypeError(f'Expected `is_general` to be of type bool, received {type(self.name).__name__}') + if self.creator and not isinstance(self.creator, str): + raise TypeError(f'Expected `creator` to be of type str, received {type(self.name).__name__}') + if self.name_normalized and not isinstance(self.name_normalized, str): + raise TypeError(f'Expected `name_normalized` to be of type str, received {type(self.name).__name__}') + if self.previous_names and not isinstance(self.previous_names, list): + raise TypeError(f'Expected `previous_names` to be of type list, received {type(self.name).__name__}') + if self.purpose and not isinstance(self.purpose, str): + raise TypeError(f'Expected `purpose` to be of type str, received {type(self.name).__name__}') + if self.topic and not isinstance(self.topic, str): + raise TypeError(f'Expected `topic` to be of type str, received {type(self.name).__name__}') + if self.canvas_empty and not 
isinstance(self.canvas_empty, bool): + raise TypeError(f'Expected `canvas_empty` to be of type bool, received {type(self.name).__name__}') + if self.canvas_id and not isinstance(self.canvas_id, str): + raise TypeError(f'Expected `canvas_id` to be of type str, received {type(self.name).__name__}') + if self.num_members and not isinstance(self.num_members, int): + raise TypeError(f'Expected `num_members` to be of type int, received {type(self.name).__name__}') + if self.is_member and not isinstance(self.is_member, bool): + raise TypeError(f'Expected `is_member` to be of type bool, received {type(self.name).__name__}') + if self.is_pending_ext_shared and not isinstance(self.is_pending_ext_shared, bool): + raise TypeError(f'Expected `is_pending_ext_shared` to be of type bool, received {type(self.name).__name__}') + if self.is_ext_shared and not isinstance(self.is_ext_shared, bool): + raise TypeError(f'Expected `is_ext_shared` to be of type bool, received {type(self.name).__name__}') + if self.is_shared and not isinstance(self.is_shared, bool): + raise TypeError(f'Expected `is_shared` to be of type bool, received {type(self.name).__name__}') + if self.is_org_shared and not isinstance(self.is_org_shared, bool): + raise TypeError(f'Expected `is_org_shared` to be of type bool, received {type(self.name).__name__}') + if self.is_group and not isinstance(self.is_group, bool): + raise TypeError(f'Expected `is_group` to be of type bool, received {type(self.name).__name__}') + if self.is_channel and not isinstance(self.is_channel, bool): + raise TypeError(f'Expected `is_channel` to be of type bool, received {type(self.name).__name__}') + @dataclass(slots=True) class ConversationSuccinct(object): @@ -61,6 +109,32 @@ class ConversationSuccinct(object): creator: str num_members: int + def __post_init__(self): + if self.is_private and not isinstance(self.is_private, bool): + raise TypeError(f'Expected `is_private` to be of type bool, received {type(self.name).__name__}') + if self.is_im and not isinstance(self.is_im, bool): + raise TypeError(f'Expected `is_im` to be of type bool, received {type(self.name).__name__}') + if self.is_mpim and not isinstance(self.is_mpim, bool): + raise TypeError(f'Expected `is_mpim` to be of type bool, received {type(self.name).__name__}') + if self.is_archived and not isinstance(self.is_archived, bool): + raise TypeError(f'Expected `is_archived` to be of type bool, received {type(self.name).__name__}') + if self.is_private and not isinstance(self.is_private, bool): + raise TypeError(f'Expected `is_private` to be of type bool, received {type(self.name).__name__}') + if self.is_im and not isinstance(self.is_im, bool): + raise TypeError(f'Expected `is_im` to be of type bool, received {type(self.name).__name__}') + if self.is_mpim and not isinstance(self.is_mpim, bool): + raise TypeError(f'Expected `is_mpim` to be of type bool, received {type(self.name).__name__}') + if self.is_archived and not isinstance(self.is_archived, bool): + raise TypeError(f'Expected `is_archived` to be of type bool, received {type(self.name).__name__}') + if self.canvas_empty and not isinstance(self.canvas_empty, bool): + raise TypeError(f'Expected `canvas_empty` to be of type bool, received {type(self.name).__name__}') + if self.canvas_id and not isinstance(self.canvas_id, str): + raise TypeError(f'Expected `canvas_id` to be of type str, received {type(self.name).__name__}') + if self.creator and not isinstance(self.creator, str): + raise TypeError(f'Expected `creator` to be of type str, received 
{type(self.name).__name__}') + if self.num_members and not isinstance(self.num_members, int): + raise TypeError(f'Expected `num_members` to be of type int, received {type(self.name).__name__}') + def create_from_dict(conv_dict: Dict, verbose: bool) -> Conversation or ConversationSuccinct: """ Create a User object from a dict response from the Slack API diff --git a/src/slack_watchman/models/post.py b/src/slack_watchman/models/post.py index cbe6f87..0fe3183 100644 --- a/src/slack_watchman/models/post.py +++ b/src/slack_watchman/models/post.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import List, Dict +from typing import List, Dict, Any from slack_watchman.models import conversation, user from slack_watchman.utils import convert_timestamp @@ -8,7 +8,6 @@ @dataclass(slots=True) class File(object): id: str - team: str created: int or float or str user: user.User or user.UserSuccinct or str name: str @@ -25,8 +24,53 @@ class File(object): url_private_download: str permalink: str permalink_public: str - shares: List + shares: Dict[Any, Any] + def __post_init__(self): + if self.id and not isinstance(self.id, str): + raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}') + if self.created and not ( + isinstance(self.created, str) or isinstance(self.created, int) or isinstance(self.created, float)): + raise TypeError( + f'Expected `created` to be of type str or int or float, received {type(self.created).__name__}') + if self.user and not ( + isinstance(self.user, user.User) or isinstance(self.user, user.UserSuccinct) or isinstance(self.user, + str)): + raise TypeError( + f'Expected `user` to be of type User or UserSuccinct or str, received {type(self.user).__name__}') + if self.name and not isinstance(self.name, str): + raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}') + if self.title and not isinstance(self.title, str): + raise TypeError(f'Expected `title` to be of type str, received {type(self.title).__name__}') + if self.mimetype and not isinstance(self.mimetype, str): + raise TypeError(f'Expected `mimetype` to be of type str, received {type(self.mimetype).__name__}') + if self.filetype and not isinstance(self.filetype, str): + raise TypeError(f'Expected `filetype` to be of type str, received {type(self.filetype).__name__}') + if self.pretty_type and not isinstance(self.pretty_type, str): + raise TypeError(f'Expected `pretty_type` to be of type str, received {type(self.pretty_type).__name__}') + if self.editable and not isinstance(self.editable, bool): + raise TypeError(f'Expected `editable` to be of type bool, received {type(self.editable).__name__}') + if self.size and not (isinstance(self.size, str) or isinstance(self.size, int) or isinstance(self.size, float)): + raise TypeError(f'Expected `size` to be of type str or int or float, received {type(self.size).__name__}') + if self.mode and not isinstance(self.mode, str): + raise TypeError(f'Expected `mode` to be of type str, received {type(self.mode).__name__}') + if self.is_public and not isinstance(self.is_public, bool): + raise TypeError(f'Expected `is_public` to be of type bool, received {type(self.is_public).__name__}') + if self.public_url_shared and not isinstance(self.public_url_shared, bool): + raise TypeError( + f'Expected `public_url_shared` to be of type bool, received {type(self.public_url_shared).__name__}') + if self.url_private and not isinstance(self.url_private, str): + raise TypeError(f'Expected `url_private` to be of type str, 
received {type(self.url_private).__name__}') + if self.url_private_download and not isinstance(self.url_private_download, str): + raise TypeError( + f'Expected `url_private_download` to be of type str, received {type(self.url_private_download).__name__}') + if self.permalink and not isinstance(self.permalink, str): + raise TypeError(f'Expected `permalink` to be of type str, received {type(self.permalink).__name__}') + if self.permalink_public and not isinstance(self.permalink_public, str): + raise TypeError( + f'Expected `permalink_public` to be of type str, received {type(self.permalink_public).__name__}') + if self.shares and not isinstance(self.shares, dict): + raise TypeError(f'Expected `shares` to be of type dict, received {type(self.shares).__name__}') @dataclass(slots=True) class Message(object): @@ -41,6 +85,39 @@ class Message(object): timestamp: int or float or str conversation: conversation.Conversation or conversation.ConversationSuccinct + def __post_init__(self): + if self.id and not isinstance(self.id, str): + raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}') + if self.team and not isinstance(self.team, str): + raise TypeError(f'Expected `team` to be of type str, received {type(self.team).__name__}') + if self.created and not ( + isinstance(self.created, str) or isinstance(self.created, int) or isinstance(self.created, float)): + raise TypeError( + f'Expected `created` to be of type str or int or float, received {type(self.created).__name__}') + if self.user and not ( + isinstance(self.user, user.User) or isinstance(self.user, user.UserSuccinct) or isinstance(self.user, + str)): + raise TypeError( + f'Expected `user` to be of type User or UserSuccinct or str, received {type(self.user).__name__}') + if self.text and not isinstance(self.text, str): + raise TypeError(f'Expected `text` to be of type str, received {type(self.text).__name__}') + if self.type and not isinstance(self.type, str): + raise TypeError(f'Expected `type` to be of type str, received {type(self.type).__name__}') + if self.permalink and not isinstance(self.permalink, str): + raise TypeError(f'Expected `permalink` to be of type str, received {type(self.permalink).__name__}') + if self.blocks and not isinstance(self.blocks, list): + raise TypeError(f'Expected `blocks` to be of type list, received {type(self.blocks).__name__}') + if self.timestamp and not ( + isinstance(self.timestamp, str) or isinstance(self.timestamp, int) or isinstance(self.timestamp, + float)): + raise TypeError( + f'Expected `timestamp` to be of type str or int or float, received {type(self.timestamp).__name__}') + if self.conversation and not ( + isinstance(self.conversation, conversation.Conversation) or isinstance(self.conversation, + conversation.ConversationSuccinct)): + raise TypeError( + f'Expected `conversation` to be of type Conversation or ConversationSuccinct, received {type(self.conversation).__name__}') + def create_message_from_dict(message_dict: Dict) -> Message: """ Create a Message post object from a dict containing JSON data from @@ -75,10 +152,8 @@ def create_file_from_dict(file_dict: Dict) -> File: Returns: File object for the post """ - return File( id=file_dict.get('id'), - team=file_dict.get('source_team'), created=convert_timestamp(file_dict.get('created')), user=file_dict.get('user'), name=file_dict.get('name'), diff --git a/src/slack_watchman/models/signature.py b/src/slack_watchman/models/signature.py index c5164c3..0ed4c5a 100644 --- a/src/slack_watchman/models/signature.py 
+++ b/src/slack_watchman/models/signature.py @@ -1,5 +1,6 @@ +import datetime from dataclasses import dataclass -from typing import List +from typing import List, Dict, Any @dataclass(frozen=True, slots=True) @@ -7,6 +8,12 @@ class TestCases: match_cases: List[str] fail_cases: List[str] + def __post_init__(self): + if self.match_cases and not isinstance(self.match_cases, list): + raise TypeError(f'Expected `match_cases` to be of type list, received {type(self.match_cases).__name__}') + if self.fail_cases and not isinstance(self.fail_cases, list): + raise TypeError(f'Expected `fail_cases` to be of type list, received {type(self.fail_cases).__name__}') + @dataclass(frozen=True, slots=True) class Signature: @@ -15,16 +22,79 @@ class Signature: They also contain regex patterns to validate data that is found""" name: str - status: bool + status: str author: str - date: str + date: str | datetime.date | datetime.datetime version: str description: str - severity: int - watchman_apps: List[str] + severity: int or str + watchman_apps: Dict[str, Any] category: str scope: List[str] file_types: List[str] test_cases: TestCases - search_strings: str + search_strings: List[str] patterns: List[str] + + def __post_init__(self): + if self.name and not isinstance(self.name, str): + raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}') + if self.status and not isinstance(self.status, str): + raise TypeError(f'Expected `status` to be of type str, received {type(self.status).__name__}') + if self.author and not isinstance(self.author, str): + raise TypeError(f'Expected `author` to be of type str, received {type(self.author).__name__}') + if self.date and not (isinstance(self.date, datetime.date) + or isinstance(self.date, str) + or isinstance(self.date, datetime.datetime)): + raise TypeError(f'Expected `date` to be of type str, received {type(self.date).__name__}') + if self.version and not isinstance(self.version, str): + raise TypeError(f'Expected `version` to be of type str, received {type(self.version).__name__}') + if self.description and not isinstance(self.description, str): + raise TypeError(f'Expected `description` to be of type str, received {type(self.description).__name__}') + if self.severity and not (isinstance(self.severity, int) or isinstance(self.severity, str)): + raise TypeError(f'Expected `severity` to be of type int or str, received {type(self.severity).__name__}') + if self.watchman_apps and not isinstance(self.watchman_apps, dict): + raise TypeError( + f'Expected `watchman_apps` to be of type dict, received {type(self.watchman_apps).__name__}') + if self.category and not isinstance(self.category, str): + raise TypeError(f'Expected `category` to be of type str, received {type(self.category).__name__}') + if self.scope and not isinstance(self.scope, list): + raise TypeError(f'Expected `scope` to be of type list, received {type(self.scope).__name__}') + if self.file_types and not isinstance(self.file_types, list): + raise TypeError(f'Expected `file_types` to be of type list, received {type(self.file_types).__name__}') + if self.test_cases and not isinstance(self.test_cases, TestCases): + raise TypeError(f'Expected `test_cases` to be of type TestCases, received {type(self.test_cases).__name__}') + if self.search_strings and not isinstance(self.search_strings, list): + raise TypeError( + f'Expected `search_strings` to be of type list, received {type(self.search_strings).__name__}') + if self.patterns and not isinstance(self.patterns, list): + raise 
TypeError(f'Expected `patterns` to be of type list, received {type(self.patterns).__name__}')
+
+
+def create_from_dict(signature_dict: Dict[str, Any]) -> Signature:
+    """ Create a Signature object from a dictionary
+
+    Args:
+        signature_dict: dict/JSON object signature
+    Returns:
+        Signature
+    """
+
+    return Signature(
+        name=signature_dict.get('name'),
+        status=signature_dict.get('status'),
+        author=signature_dict.get('author'),
+        date=signature_dict.get('date'),
+        version=signature_dict.get('version'),
+        description=signature_dict.get('description'),
+        severity=signature_dict.get('severity'),
+        watchman_apps=signature_dict.get('watchman_apps'),
+        category=signature_dict.get('watchman_apps', {}).get('slack_std', {}).get('category'),
+        scope=signature_dict.get('watchman_apps', {}).get('slack_std', {}).get('scope'),
+        file_types=signature_dict.get('watchman_apps', {}).get('slack_std', {}).get('file_types'),
+        test_cases=TestCases(
+            match_cases=signature_dict.get('test_cases', {}).get('match_cases'),
+            fail_cases=signature_dict.get('test_cases', {}).get('fail_cases')
+        ),
+        search_strings=signature_dict.get('watchman_apps', {}).get('slack_std', {}).get('search_strings'),
+        patterns=signature_dict.get('patterns'))
diff --git a/src/slack_watchman/models/user.py b/src/slack_watchman/models/user.py
index 5dfbd54..81f5cb0 100644
--- a/src/slack_watchman/models/user.py
+++ b/src/slack_watchman/models/user.py
@@ -20,7 +20,7 @@ class User(object):
     display_name: str
     tz: str
     tz_label: str
-    tz_offset: str
+    tz_offset: str or int
     title: str
     is_admin: bool
     is_owner: bool
@@ -31,6 +31,54 @@ class User(object):
     updated: int or float or str
     has_2fa: bool
 
+    def __post_init__(self):
+        if self.id and not isinstance(self.id, str):
+            raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}')
+        if self.name and not isinstance(self.name, str):
+            raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}')
+        if self.email and not isinstance(self.email, str):
+            raise TypeError(f'Expected `email` to be of type str, received {type(self.email).__name__}')
+        if self.deleted and not isinstance(self.deleted, bool):
+            raise TypeError(f'Expected `deleted` to be of type bool, received {type(self.deleted).__name__}')
+        if self.real_name and not isinstance(self.real_name, str):
+            raise TypeError(f'Expected `real_name` to be of type str, received {type(self.real_name).__name__}')
+        if self.first_name and not isinstance(self.first_name, str):
+            raise TypeError(f'Expected `first_name` to be of type str, received {type(self.first_name).__name__}')
+        if self.last_name and not isinstance(self.last_name, str):
+            raise TypeError(f'Expected `last_name` to be of type str, received {type(self.last_name).__name__}')
+        if self.phone and not isinstance(self.phone, str):
+            raise TypeError(f'Expected `phone` to be of type str, received {type(self.phone).__name__}')
+        if self.skype and not isinstance(self.skype, str):
+            raise TypeError(f'Expected `skype` to be of type str, received {type(self.skype).__name__}')
+        if self.display_name and not isinstance(self.display_name, str):
+            raise TypeError(f'Expected `display_name` to be of type str, received {type(self.display_name).__name__}')
+        if self.tz and not isinstance(self.tz, str):
+            raise TypeError(f'Expected `tz` to be of type str, received {type(self.tz).__name__}')
+        if self.tz_label and not isinstance(self.tz_label, str):
+            raise TypeError(f'Expected `tz_label` to be of type str, received {type(self.tz_label).__name__}')
+        if self.tz_offset and not (isinstance(self.tz_offset, str) or isinstance(self.tz_offset, int)):
+            raise TypeError(f'Expected `tz_offset` to be of type str or int, received {type(self.tz_offset).__name__}')
+        if self.title and not isinstance(self.title, str):
+            raise TypeError(f'Expected `title` to be of type str, received {type(self.title).__name__}')
+        if self.is_admin and not isinstance(self.is_admin, bool):
+            raise TypeError(f'Expected `is_admin` to be of type bool, received {type(self.is_admin).__name__}')
+        if self.is_owner and not isinstance(self.is_owner, bool):
+            raise TypeError(f'Expected `is_owner` to be of type bool, received {type(self.is_owner).__name__}')
+        if self.is_primary_owner and not isinstance(self.is_primary_owner, bool):
+            raise TypeError(f'Expected `is_primary_owner` to be of type bool, received {type(self.is_primary_owner).__name__}')
+        if self.is_restricted and not isinstance(self.is_restricted, bool):
+            raise TypeError(f'Expected `is_restricted` to be of type bool, received {type(self.is_restricted).__name__}')
+        if self.is_ultra_restricted and not isinstance(self.is_ultra_restricted, bool):
+            raise TypeError(f'Expected `is_ultra_restricted` to be of type bool, received {type(self.is_ultra_restricted).__name__}')
+        if self.is_bot and not isinstance(self.is_bot, bool):
+            raise TypeError(f'Expected `is_bot` to be of type bool, received {type(self.is_bot).__name__}')
+        if self.updated and not (isinstance(self.updated, int) or
+                                 isinstance(self.updated, float) or
+                                 isinstance(self.updated, str)):
+            raise TypeError(f'Expected `updated` to be of type int, float or str, received {type(self.updated).__name__}')
+        if self.has_2fa and not isinstance(self.has_2fa, bool):
+            raise TypeError(f'Expected `has_2fa` to be of type bool, received {type(self.has_2fa).__name__}')
+
 
 @dataclass(slots=True)
 class UserSuccinct(object):
@@ -40,9 +88,23 @@ class UserSuccinct(object):
     name: str
     email: str
     display_name: str
-    has_2fa: str
+    has_2fa: bool
     is_admin: str
 
+    def __post_init__(self):
+        if self.id and not isinstance(self.id, str):
+            raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}')
+        if self.name and not isinstance(self.name, str):
+            raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}')
+        if self.email and not isinstance(self.email, str):
+            raise TypeError(f'Expected `email` to be of type str, received {type(self.email).__name__}')
+        if self.display_name and not isinstance(self.display_name, str):
+            raise TypeError(f'Expected `display_name` to be of type str, received {type(self.display_name).__name__}')
+        if self.has_2fa and not isinstance(self.has_2fa, bool):
+            raise TypeError(f'Expected `has_2fa` to be of type bool, received {type(self.has_2fa).__name__}')
+        if self.is_admin and not isinstance(self.is_admin, bool):
+            raise TypeError(f'Expected `is_admin` to be of type bool, received {type(self.is_admin).__name__}')
+
 
 def create_from_dict(user_dict: Dict,
                      verbose: bool) -> User or UserSuccinct:
@@ -65,13 +127,13 @@ def create_from_dict(user_dict: Dict,
             tz=user_dict.get('tz'),
             tz_label=user_dict.get('tz_label'),
             tz_offset=user_dict.get('tz_offset'),
-            title=user_dict.get('profile').get('title'),
-            phone=user_dict.get('profile').get('phone'),
-            skype=user_dict.get('profile').get('skype'),
-            display_name=user_dict.get('profile').get('display_name'),
-            email=user_dict.get('profile').get('email'),
-            first_name=user_dict.get('profile').get('first_name'),
-            last_name=user_dict.get('profile').get('last_name'),
+            title=user_dict.get('profile', {}).get('title'),
+            phone=user_dict.get('profile', {}).get('phone'),
+            skype=user_dict.get('profile', {}).get('skype'),
+            display_name=user_dict.get('profile', {}).get('display_name'),
+            email=user_dict.get('profile', {}).get('email'),
+            first_name=user_dict.get('profile', {}).get('first_name'),
+            last_name=user_dict.get('profile', {}).get('last_name'),
             is_admin=user_dict.get('is_admin'),
             is_owner=user_dict.get('is_owner'),
             is_primary_owner=user_dict.get('is_primary_owner'),
@@ -85,8 +147,8 @@ def create_from_dict(user_dict: Dict,
         return UserSuccinct(
             id=user_dict.get('id'),
             name=user_dict.get('name'),
-            display_name=user_dict.get('profile').get('display_name'),
+            display_name=user_dict.get('profile', {}).get('display_name'),
             has_2fa=user_dict.get('has_2fa'),
             is_admin=user_dict.get('is_admin'),
-            email=user_dict.get('profile').get('email')
+            email=user_dict.get('profile', {}).get('email')
         )
diff --git a/src/slack_watchman/models/workspace.py b/src/slack_watchman/models/workspace.py
index e2b4586..7ce3451 100644
--- a/src/slack_watchman/models/workspace.py
+++ b/src/slack_watchman/models/workspace.py
@@ -18,6 +18,18 @@ class Workspace(object):
     enterprise_domain: Optional[str] = None
     enterprise_name: Optional[str] = None
 
+    def __post_init__(self):
+        if self.id and not isinstance(self.id, str):
+            raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}')
+        if self.name and not isinstance(self.name, str):
+            raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}')
+        if self.domain and not isinstance(self.domain, str):
+            raise TypeError(f'Expected `domain` to be of type str, received {type(self.domain).__name__}')
+        if self.url and not isinstance(self.url, str):
+            raise TypeError(f'Expected `url` to be of type str, received {type(self.url).__name__}')
+        if self.email_domain and not isinstance(self.email_domain, str):
+            raise TypeError(f'Expected `email_domain` to be of type str, received {type(self.email_domain).__name__}')
+
 
 def create_from_dict(workspace_dict: Dict) -> Workspace:
     """ Return a Workspace object based off an input dictionary
diff --git a/src/slack_watchman/signature_downloader.py b/src/slack_watchman/signature_downloader.py
index f0acc8a..ff50b4b 100644
--- a/src/slack_watchman/signature_downloader.py
+++ b/src/slack_watchman/signature_downloader.py
@@ -9,7 +9,7 @@
 import yaml
 
 from slack_watchman.loggers import JSONLogger, StdoutLogger
-from slack_watchman.models.signature import Signature, TestCases
+from slack_watchman.models.signature import Signature, create_from_dict
 
 SIGNATURE_URL = 'https://github.com/PaperMtn/watchman-signatures/archive/main.zip'
 
@@ -61,7 +61,8 @@ def download_signatures(self) -> List[Signature]:
             self.logger.log('DEBUG', traceback.format_exc())
             sys.exit(1)
 
-    def _process_signature(self, signature_data: bytes) -> List[Signature]:
+    @staticmethod
+    def _process_signature(signature_data: bytes) -> List[Signature]:
         """ Process a signature data bytes object into a list of Signature objects.
 
         This function takes a bytes object containing signature data, parses it into a dictionary,
@@ -77,22 +78,5 @@ def _process_signature(self, signature_data: bytes) -> List[Signature]:
         output = []
         for sig in signature_dict.get('signatures'):
             if 'slack_std' in sig.get('watchman_apps') and sig.get('status') == 'enabled':
-                output.append(Signature(
-                    name=sig.get('name'),
-                    status=sig.get('status'),
-                    author=sig.get('author'),
-                    date=sig.get('date'),
-                    version=sig.get('version'),
-                    description=sig.get('description'),
-                    severity=sig.get('severity'),
-                    watchman_apps=sig.get('watchman_apps'),
-                    category=sig.get('watchman_apps').get('slack_std').get('category'),
-                    scope=sig.get('watchman_apps').get('slack_std').get('scope'),
-                    file_types=sig.get('watchman_apps').get('slack_std').get('file_types'),
-                    test_cases=TestCases(
-                        match_cases=sig.get('test_cases').get('match_cases'),
-                        fail_cases=sig.get('test_cases').get('fail_cases')
-                    ),
-                    search_strings=sig.get('watchman_apps').get('slack_std').get('search_strings'),
-                    patterns=sig.get('patterns')))
+                output.append(create_from_dict(sig))
         return output

From 598893e50adcf2b5eaf6842978b91db3b3ee0649 Mon Sep 17 00:00:00 2001
From: PaperMtn
Date: Fri, 25 Oct 2024 20:40:52 +0100
Subject: [PATCH 06/18] Version bump

---
 CHANGELOG.md   |  7 ++++
 poetry.lock    | 86 +++++++++++++++++++++++++++++++++++++++++++++++++-
 pyproject.toml |  1 +
 3 files changed, 93 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 229cda5..ef99890 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,10 @@
+## [4.3.0] - 2024-10-x
+### Changed
+- Timestamps are now in UTC across all logging for consistency
+
+### Fixed
+- Fixed bugs in the User, Workspace and Message models that meant some values were not being picked up
+
 ## [4.2.0] - 2024-09-27
 ### Added
 - Added enumeration of conversations with populated Canvases attached. These can contain sensitive information, and are worth reviewing.
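With the refactor above, a Signature is now built in one place. The sketch below shows that flow with illustrative values (not a real entry from the watchman-signatures feed), assuming the module layout introduced in this patch:

    from slack_watchman.models import signature

    sig_dict = {
        'name': 'Example API Token',
        'status': 'enabled',
        'author': 'PaperMtn',
        'date': '2024-10-21',
        'version': '1.0.0',
        'description': 'Detects example API tokens',
        'severity': '70',
        'watchman_apps': {
            'slack_std': {
                'category': 'secrets',
                'scope': ['messages'],
                'file_types': None,
                'search_strings': ['example-']
            }
        },
        'test_cases': {
            'match_cases': ['token: example-1234567890'],
            'fail_cases': ['host: example.com']
        },
        'patterns': ['example-[0-9]{10}']
    }

    # create_from_dict lifts the nested watchman_apps.slack_std fields
    # (category, scope, file_types, search_strings) onto the Signature object
    sig = signature.create_from_dict(sig_dict)
    assert sig.category == 'secrets'
    assert sig.search_strings == ['example-']

    # The new __post_init__ validation rejects malformed signature data,
    # e.g. a non-string name raises TypeError
    signature.create_from_dict({**sig_dict, 'name': 123})

This is the same path the downloader now takes for every enabled signature that targets slack_std.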
diff --git a/poetry.lock b/poetry.lock index e388e31..b1b005a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -142,6 +142,20 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "idna" version = "3.10" @@ -156,6 +170,65 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "pyyaml" version = "6.0.2" @@ -250,6 +323,17 @@ files = [ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] +[[package]] +name = "tomli" +version = "2.0.2" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = 
"sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + [[package]] name = "urllib3" version = "2.2.3" @@ -270,4 +354,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">=3.10" -content-hash = "0517ee4fc0d0ec1190dc04543ad705e131b49a72da032b6783f6511345b0796f" +content-hash = "c411ec8eea25560062fa813de399c0540fe6ce92a734d89ce08cf495e1d45823" diff --git a/pyproject.toml b/pyproject.toml index 0c73196..49851eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ colorama = "^0.4.6" pyyaml = "^6.0.2" requests = "^2.32.3" beautifulsoup4 = "^4.12.3" +pytest = "^8.3.3" [tool.poetry.scripts] slack-watchman = "slack_watchman:main" From d6095a731891555a3ff492c6f0227d4c091935ad Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 20:43:11 +0100 Subject: [PATCH 07/18] Unit tests created for all models --- tests/unit/models/test_unit_conversation.py | 156 +++++++++ tests/unit/models/test_unit_post.py | 331 ++++++++++++++++++++ tests/unit/models/test_unit_signature.py | 75 +++++ tests/unit/models/test_unit_user.py | 104 ++++++ tests/unit/models/test_unit_workspace.py | 81 +++++ 5 files changed, 747 insertions(+) create mode 100644 tests/unit/models/test_unit_conversation.py create mode 100644 tests/unit/models/test_unit_post.py create mode 100644 tests/unit/models/test_unit_signature.py create mode 100644 tests/unit/models/test_unit_user.py create mode 100644 tests/unit/models/test_unit_workspace.py diff --git a/tests/unit/models/test_unit_conversation.py b/tests/unit/models/test_unit_conversation.py new file mode 100644 index 0000000..6d4f8ad --- /dev/null +++ b/tests/unit/models/test_unit_conversation.py @@ -0,0 +1,156 @@ +import pytest +from slack_watchman.models import conversation +from slack_watchman.utils import convert_timestamp + +CONVERSATION_DICT = { + 'id': 'C1234567890', + 'name': 'Example Conversation', + 'created': 1643723400, + 'num_members': 10, + 'is_general': True, + 'is_private': False, + 'is_im': False, + 'is_mpim': False, + 'is_archived': False, + 'creator': 'U1234567890', + 'name_normalized': 'example-conversation', + 'is_ext_shared': False, + 'is_org_shared': False, + 'is_shared': False, + 'is_channel': True, + 'is_group': False, + 'is_pending_ext_shared': False, + 'previous_names': ['old-name'], + 'is_member': True, + 'purpose': { + 'value': 'This is an example conversation' + }, + 'properties': { + 'canvas': { + 'is_empty': False, + 'file_id': 'CAN1234567890' + } + }, + 'canvas_empty': False, + 'canvas_id': 'some-canvas-id', + 'topic': {'value': 'This is an example topic'} +} + +CONVERSATION_SUCCINCT_DICT = { + 'id': 'C1234567890', + 'name': 'Example Conversation', + 'created': 1643723400, + 'num_members': 10, + 'is_private': False, + 'is_im': False, + 'is_mpim': False, + 'is_archived': False, + 'properties': { + 'canvas': { + 'is_empty': False, + 'file_id': 'CAN1234567890' + } + }, + 'creator': 'U1234567890' +} + +CONVERSATION_MISSING_FIELDS = { + 'id': 'C1234567890', + 'name': 'Example Conversation', +} + + +@pytest.fixture +def example_conversation(): + return conversation.create_from_dict(CONVERSATION_DICT, verbose=True) + + +@pytest.fixture +def example_conversation_succinct(): + return conversation.create_from_dict(CONVERSATION_SUCCINCT_DICT, verbose=False) + + +@pytest.fixture +def example_conversation_missing_fields(): + return 
conversation.create_from_dict(CONVERSATION_MISSING_FIELDS, verbose=True) + + +def test_conversation_initialisation(example_conversation): + # Test that the Conversation object is of the correct type + assert isinstance(example_conversation, conversation.Conversation) + + # Test that the Conversation object has the correct attributes + assert example_conversation.id == CONVERSATION_DICT.get('id') + assert example_conversation.name == CONVERSATION_DICT.get('name') + assert example_conversation.created == convert_timestamp(CONVERSATION_DICT.get('created')) + assert example_conversation.num_members == CONVERSATION_DICT.get('num_members') + assert example_conversation.is_general == CONVERSATION_DICT.get('is_general') + assert example_conversation.is_private == CONVERSATION_DICT.get('is_private') + assert example_conversation.is_im == CONVERSATION_DICT.get('is_im') + assert example_conversation.is_mpim == CONVERSATION_DICT.get('is_mpim') + assert example_conversation.is_archived == CONVERSATION_DICT.get('is_archived') + assert example_conversation.creator == CONVERSATION_DICT.get('creator') + assert example_conversation.name_normalized == CONVERSATION_DICT.get('name_normalized') + assert example_conversation.is_ext_shared == CONVERSATION_DICT.get('is_ext_shared') + assert example_conversation.is_org_shared == CONVERSATION_DICT.get('is_org_shared') + assert example_conversation.is_shared == CONVERSATION_DICT.get('is_shared') + assert example_conversation.is_channel == CONVERSATION_DICT.get('is_channel') + assert example_conversation.is_group == CONVERSATION_DICT.get('is_group') + assert example_conversation.is_pending_ext_shared == CONVERSATION_DICT.get('is_pending_ext_shared') + assert example_conversation.previous_names == CONVERSATION_DICT.get('previous_names') + assert example_conversation.is_member == CONVERSATION_DICT.get('is_member') + assert example_conversation.purpose == CONVERSATION_DICT.get('purpose').get('value') + assert example_conversation.canvas_empty == CONVERSATION_DICT.get('properties').get('canvas').get('is_empty') + assert example_conversation.canvas_id == CONVERSATION_DICT.get('properties').get('canvas').get('file_id') + assert example_conversation.topic == CONVERSATION_DICT.get('topic').get('value') + + +def test_conversation_succinct_initialisation(example_conversation_succinct): + # Test that the Conversation object is of the correct type + assert isinstance(example_conversation_succinct, conversation.ConversationSuccinct) + + # Test that the Conversation object has the correct attributes + assert example_conversation_succinct.id == CONVERSATION_SUCCINCT_DICT.get('id') + assert example_conversation_succinct.name == CONVERSATION_SUCCINCT_DICT.get('name') + assert example_conversation_succinct.created == convert_timestamp(CONVERSATION_SUCCINCT_DICT.get('created')) + assert example_conversation_succinct.num_members == CONVERSATION_SUCCINCT_DICT.get('num_members') + assert example_conversation_succinct.is_private == CONVERSATION_SUCCINCT_DICT.get('is_private') + assert example_conversation_succinct.is_im == CONVERSATION_SUCCINCT_DICT.get('is_im') + assert example_conversation_succinct.is_mpim == CONVERSATION_SUCCINCT_DICT.get('is_mpim') + assert example_conversation_succinct.is_archived == CONVERSATION_SUCCINCT_DICT.get('is_archived') + assert example_conversation_succinct.creator == CONVERSATION_SUCCINCT_DICT.get('creator') + assert example_conversation_succinct.canvas_empty == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas').get('is_empty') + assert 
example_conversation_succinct.canvas_id == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas').get('file_id') + + +def test_conversation_missing_fields(example_conversation_missing_fields): + # Test that the Conversation object is of the correct type + assert isinstance(example_conversation_missing_fields, conversation.Conversation) + + # Test that the Conversation object has the correct attributes + assert example_conversation_missing_fields.id is CONVERSATION_MISSING_FIELDS.get('id') + assert example_conversation_missing_fields.name == CONVERSATION_MISSING_FIELDS.get('name') + assert example_conversation_missing_fields.created is None + assert example_conversation_missing_fields.num_members is None + assert example_conversation_missing_fields.is_private is None + assert example_conversation_missing_fields.is_im is None + assert example_conversation_missing_fields.is_mpim is None + assert example_conversation_missing_fields.is_archived is None + assert example_conversation_missing_fields.creator is None + assert example_conversation_missing_fields.canvas_empty is None + assert example_conversation_missing_fields.canvas_id is None + + +def test_field_type(): + # Test that correct error is raised when id is not a string + conversation_dict = CONVERSATION_DICT + conversation_dict['id'] = 123 + with pytest.raises(TypeError): + test_conversation = conversation.create_from_dict(conversation_dict, verbose=True) + + +def test_missing_field(): + temp_conversation_dict = CONVERSATION_DICT.copy() + del temp_conversation_dict['id'] + test_conversation = conversation.create_from_dict(temp_conversation_dict, verbose=True) + assert test_conversation.id is None diff --git a/tests/unit/models/test_unit_post.py b/tests/unit/models/test_unit_post.py new file mode 100644 index 0000000..906c17a --- /dev/null +++ b/tests/unit/models/test_unit_post.py @@ -0,0 +1,331 @@ +import pytest +from slack_watchman.models import post, user, conversation +from slack_watchman.utils import convert_timestamp + +CONVERSATION_DICT = { + 'id': 'C1234567890', + 'name': 'Example Conversation', + 'created': 1643723400, + 'num_members': 10, + 'is_general': True, + 'is_private': False, + 'is_im': False, + 'is_mpim': False, + 'is_archived': False, + 'creator': 'U1234567890', + 'name_normalized': 'example-conversation', + 'is_ext_shared': False, + 'is_org_shared': False, + 'is_shared': False, + 'is_channel': True, + 'is_group': False, + 'is_pending_ext_shared': False, + 'previous_names': ['old-name'], + 'is_member': True, + 'purpose': { + 'value': 'This is an example conversation' + }, + 'properties': { + 'canvas': { + 'is_empty': False, + 'file_id': 'CAN1234567890' + } + }, + 'canvas_empty': False, + 'canvas_id': 'some-canvas-id', + 'topic': {'value': 'This is an example topic'} +} + +CONVERSATION_SUCCINCT_DICT = { + 'id': 'C1234567890', + 'name': 'Example Conversation', + 'created': 1643723400, + 'num_members': 10, + 'is_private': False, + 'is_im': False, + 'is_mpim': False, + 'is_archived': False, + 'properties': { + 'canvas': { + 'is_empty': False, + 'file_id': 'CAN1234567890' + } + }, + 'creator': 'U1234567890' +} + +USER_DICT = { + 'id': 'U1234567890', + 'name': 'Robert Baratheon', + 'deleted': False, + 'real_name': 'Robert Baratheon', + 'tz': 'Westeros/Kings Landing', + 'tz_label': 'Westeros Standard Time', + 'tz_offset': -18000, + 'profile': { + 'title': 'King of the Andals and the First Men', + 'phone': '+447123456789', + 'skype': 'bobbyb', + 'display_name': 'Robert Baratheon', + 'email': 'r.baratheon@me.com', + 
'first_name': 'Robert', + 'last_name': 'Baratheon', + }, + 'is_admin': True, + 'is_owner': False, + 'is_primary_owner': False, + 'is_restricted': False, + 'is_ultra_restricted': False, + 'is_bot': False, + 'updated': 1643723400, + 'has_2fa': True +} + +USER_SUCCINCT_DICT = { + 'id': 'U1234567890', + 'name': 'Joe Bloggs' +} + +MESSAGE_DICT = { + 'iid': '1234567890', + 'team': 'T1234567890', + 'ts': '1643723400.1234567890', + 'conversation': None, + 'user': None, + 'text': 'This is a test message', + 'type': 'message', + 'permalink': 'https://example.slack.com/archives/C1234567890/p1234567890', + 'blocks': [] +} + +FILE_DICT = { + 'id': 'F1234567890', + 'created': 1643723400, + 'user': USER_DICT, + 'name': 'example_file.txt', + 'title': 'Example File', + 'mimetype': 'text/plain', + 'filetype': 'txt', + 'pretty_type': 'Text File', + 'editable': True, + 'size': 1024, + 'mode': 'hosted', + 'is_public': False, + 'public_url_shared': False, + 'url_private': 'https://example.slack.com/files/U1234567890/F1234567890/example_file.txt', + 'url_private_download': 'https://example.slack.com/files/U1234567890/F1234567890/example_file.txt?mode=download', + 'permalink': 'https://example.slack.com/files/U1234567890/F1234567890/example_file.txt', + 'permalink_public': 'https://example.slack.com/files/U1234567890/F1234567890/example_file.txt', + 'shares': {} +} + +MESSAGE_DICT_MISSING_FIELDS = { + 'iid': '1234567890', + 'team': 'T1234567890', + 'ts': '1643723400.1234567890', + 'text': 'This is a test message', + 'type': 'message', + 'blocks': [] +} + +FILE_DICT_MISSING_FIELDS = { + 'id': 'F1234567890', + 'name': 'example_file.txt', + 'title': 'Example File', + 'mimetype': 'text/plain', + 'filetype': 'txt', +} + + +@pytest.fixture +def example_user(): + return user.create_from_dict(USER_DICT, verbose=True) + + +@pytest.fixture +def example_user_succinct(): + return user.create_from_dict(USER_SUCCINCT_DICT, verbose=False) + + +@pytest.fixture +def example_conversation(): + return conversation.create_from_dict(CONVERSATION_DICT, verbose=False) + + +@pytest.fixture +def example_conversation_succinct(): + return conversation.create_from_dict(CONVERSATION_SUCCINCT_DICT, verbose=True) + + +@pytest.fixture +def example_message(example_user, example_conversation): + temp_message_dict = MESSAGE_DICT.copy() + temp_message_dict['conversation'] = example_conversation + temp_message_dict['user'] = example_user + return post.create_message_from_dict(temp_message_dict) + + +@pytest.fixture +def example_message_succinct_values(example_user_succinct, example_conversation_succinct): + temp_message_dict = MESSAGE_DICT.copy() + temp_message_dict['conversation'] = example_conversation_succinct + temp_message_dict['user'] = example_user_succinct + return post.create_message_from_dict(temp_message_dict) + + +@pytest.fixture +def example_file(example_user): + temp_file_dict = FILE_DICT.copy() + temp_file_dict['user'] = example_user + return post.create_file_from_dict(temp_file_dict) + + +@pytest.fixture +def example_file_succinct_values(example_user_succinct): + temp_file_dict = FILE_DICT.copy() + temp_file_dict['user'] = example_user_succinct + return post.create_file_from_dict(temp_file_dict) + + +@pytest.fixture +def example_file_missing_fields(): + return post.create_file_from_dict(FILE_DICT_MISSING_FIELDS) + + +@pytest.fixture +def example_message_missing_fields(example_user, example_conversation): + return post.create_message_from_dict(MESSAGE_DICT_MISSING_FIELDS) + + +def test_message_initialisation(example_message, 
example_user, example_conversation): + # Test that the Message object is of the correct type + assert isinstance(example_message, post.Message) + + # Test that the Message object has the correct attributes + assert example_message.id == MESSAGE_DICT.get('iid') + assert example_message.team == MESSAGE_DICT.get('team') + assert example_message.created == convert_timestamp(MESSAGE_DICT.get('ts')) + assert example_message.user == example_user + assert example_message.text == MESSAGE_DICT.get('text') + assert example_message.type == MESSAGE_DICT.get('type') + assert example_message.permalink == MESSAGE_DICT.get('permalink') + assert example_message.blocks == MESSAGE_DICT.get('blocks') + assert example_message.timestamp == MESSAGE_DICT.get('ts') + assert example_message.conversation == example_conversation + + +def test_message_initialisation_succinct(example_message_succinct_values, + example_user_succinct, + example_conversation_succinct): + # Test that the Message object is of the correct type, even when using succinct values + assert isinstance(example_message_succinct_values, post.Message) + + # Test that the Message object has the correct attributes + assert example_message_succinct_values.id == MESSAGE_DICT.get('iid') + assert example_message_succinct_values.team == MESSAGE_DICT.get('team') + assert example_message_succinct_values.created == convert_timestamp(MESSAGE_DICT.get('ts')) + assert example_message_succinct_values.user == example_user_succinct + assert example_message_succinct_values.text == MESSAGE_DICT.get('text') + assert example_message_succinct_values.type == MESSAGE_DICT.get('type') + assert example_message_succinct_values.permalink == MESSAGE_DICT.get('permalink') + assert example_message_succinct_values.blocks == MESSAGE_DICT.get('blocks') + assert example_message_succinct_values.timestamp == MESSAGE_DICT.get('ts') + assert example_message_succinct_values.conversation == example_conversation_succinct + + +def test_message_initialisation_missing_fields(example_message_missing_fields): + # Test that the Message object is of the correct type + assert isinstance(example_message_missing_fields, post.Message) + + # Test that the Message object has the correct attributes + assert example_message_missing_fields.id is MESSAGE_DICT_MISSING_FIELDS.get('iid') + assert example_message_missing_fields.team is MESSAGE_DICT_MISSING_FIELDS.get('team') + assert example_message_missing_fields.text is MESSAGE_DICT_MISSING_FIELDS.get('text') + assert example_message_missing_fields.type is MESSAGE_DICT_MISSING_FIELDS.get('type') + assert example_message_missing_fields.blocks is MESSAGE_DICT_MISSING_FIELDS.get('blocks') + assert example_message_missing_fields.permalink is None + assert example_message_missing_fields.timestamp is MESSAGE_DICT_MISSING_FIELDS.get('ts') + assert example_message_missing_fields.conversation is None + assert example_message_missing_fields.user is None + + +def test_message_field_type(): + # Test that the correct error is raised when id is not a string + message_dict = MESSAGE_DICT.copy() + message_dict['iid'] = 123 + with pytest.raises(TypeError): + post.create_message_from_dict(message_dict) + + +def test_file_initialisation(example_file, example_user): + # Test that the File object is of the correct type + assert isinstance(example_file, post.File) + + # Test that the File object has the correct attributes + assert example_file.id == FILE_DICT.get('id') + assert example_file.created == convert_timestamp(FILE_DICT.get('created')) + assert example_file.user == 
example_user + assert example_file.name == FILE_DICT.get('name') + assert example_file.title == FILE_DICT.get('title') + assert example_file.mimetype == FILE_DICT.get('mimetype') + assert example_file.filetype == FILE_DICT.get('filetype') + assert example_file.pretty_type == FILE_DICT.get('pretty_type') + assert example_file.editable == FILE_DICT.get('editable') + assert example_file.size == FILE_DICT.get('size') + assert example_file.mode == FILE_DICT.get('mode') + assert example_file.is_public == FILE_DICT.get('is_public') + assert example_file.public_url_shared == FILE_DICT.get('public_url_shared') + assert example_file.url_private == FILE_DICT.get('url_private') + assert example_file.url_private_download == FILE_DICT.get('url_private_download') + assert example_file.permalink == FILE_DICT.get('permalink') + assert example_file.permalink_public == FILE_DICT.get('permalink_public') + assert example_file.shares == FILE_DICT.get('shares') + + +def test_file_initialisation_succinct(example_file_succinct_values, example_user_succinct): + # Test that the File object is of the correct type, even when using succinct values + assert isinstance(example_file_succinct_values, post.File) + + # Test that the File object has the correct attributes + assert example_file_succinct_values.id == FILE_DICT.get('id') + assert example_file_succinct_values.created == convert_timestamp(FILE_DICT.get('created')) + assert example_file_succinct_values.user == example_user_succinct + assert example_file_succinct_values.name == FILE_DICT.get('name') + assert example_file_succinct_values.title == FILE_DICT.get('title') + assert example_file_succinct_values.mimetype == FILE_DICT.get('mimetype') + assert example_file_succinct_values.filetype == FILE_DICT.get('filetype') + assert example_file_succinct_values.pretty_type == FILE_DICT.get('pretty_type') + + +def test_file_initialisation_missing_fields(example_file_missing_fields): + # Test that the File object is of the correct type + assert isinstance(example_file_missing_fields, post.File) + + # Test that the File object has the correct attributes + assert example_file_missing_fields.id is FILE_DICT_MISSING_FIELDS.get('id') + assert example_file_missing_fields.created is convert_timestamp(FILE_DICT_MISSING_FIELDS.get('created')) + assert example_file_missing_fields.name is FILE_DICT_MISSING_FIELDS.get('name') + assert example_file_missing_fields.title is FILE_DICT_MISSING_FIELDS.get('title') + assert example_file_missing_fields.mimetype is FILE_DICT_MISSING_FIELDS.get('mimetype') + assert example_file_missing_fields.filetype is FILE_DICT_MISSING_FIELDS.get('filetype') + assert example_file_missing_fields.pretty_type is FILE_DICT_MISSING_FIELDS.get('pretty_type') + assert example_file_missing_fields.editable is None + assert example_file_missing_fields.size is None + assert example_file_missing_fields.mode is None + assert example_file_missing_fields.is_public is None + assert example_file_missing_fields.public_url_shared is None + assert example_file_missing_fields.url_private is None + assert example_file_missing_fields.url_private_download is None + assert example_file_missing_fields.permalink is None + assert example_file_missing_fields.permalink_public is None + assert example_file_missing_fields.shares is None + assert example_file_missing_fields.user is None + + +def test_file_field_type(): + # Test that the correct error is raised when id is not a string + file_dict = FILE_DICT.copy() + file_dict['id'] = 123 + with pytest.raises(TypeError): + 
post.create_file_from_dict(file_dict) + diff --git a/tests/unit/models/test_unit_signature.py b/tests/unit/models/test_unit_signature.py new file mode 100644 index 0000000..0134e1d --- /dev/null +++ b/tests/unit/models/test_unit_signature.py @@ -0,0 +1,75 @@ +import pytest +from slack_watchman.models import signature + +SIGNATURE_DICT = { + 'name': 'Akamai API Access Tokens', + 'status': 'enabled', + 'author': 'PaperMtn', + 'date': '2023-12-22', + 'description': 'Detects exposed Akamai API Access tokens', + 'severity': '90', + 'notes': None, + 'references': None, + 'watchman_apps': { + 'slack_std': { + 'category': 'secrets', + 'scope': [ + 'messages' + ], + 'file_types': None, + 'search_strings': [ + 'akab-' + ] + } + }, + 'test_cases': { + 'match_cases': [ + 'client_token: akab-rWdcwwASNbe9fcGk-00qwecOueticOXxA' + ], + 'fail_cases': [ + 'host: akab-fakehost.akamaiapis.net' + ] + }, + 'patterns': [ + 'akab-[0-9a-zA-Z]{16}-[0-9a-zA-Z]{16}' + ] +} + + +@pytest.fixture +def example_signature(): + return signature.create_from_dict(SIGNATURE_DICT) + + +def test_signature_initialisation(example_signature): + # Test that the signature object is initialised + assert isinstance(example_signature, signature.Signature) + + # Test that the signature object has the correct attributes + assert example_signature.name == SIGNATURE_DICT.get('name') + assert example_signature.status == SIGNATURE_DICT.get('status') + assert example_signature.author == SIGNATURE_DICT.get('author') + assert example_signature.date == SIGNATURE_DICT.get('date') + assert example_signature.description == SIGNATURE_DICT.get('description') + assert example_signature.severity == SIGNATURE_DICT.get('severity') + assert example_signature.watchman_apps == SIGNATURE_DICT.get('watchman_apps') + assert example_signature.category == SIGNATURE_DICT.get('watchman_apps').get('slack_std').get('category') + + +def test_field_type(): + # Test that correct error is raised when name is not a string + signature_dict = SIGNATURE_DICT + signature_dict['name'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + +def test_missing_field(): + temp_signature_dict = SIGNATURE_DICT.copy() + del temp_signature_dict['name'] + test_signature = signature.create_from_dict(temp_signature_dict) + assert test_signature.name is None + + del temp_signature_dict['watchman_apps'] + test_signature = signature.create_from_dict(temp_signature_dict) + assert test_signature.watchman_apps is None diff --git a/tests/unit/models/test_unit_user.py b/tests/unit/models/test_unit_user.py new file mode 100644 index 0000000..21a1dc6 --- /dev/null +++ b/tests/unit/models/test_unit_user.py @@ -0,0 +1,104 @@ +import pytest +from slack_watchman.models import user + +USER_DICT = { + 'id': 'U1234567890', + 'name': 'Robert Baratheon', + 'deleted': False, + 'real_name': 'Robert Baratheon', + 'tz': 'Westeros/Kings Landing', + 'tz_label': 'Westeros Standard Time', + 'tz_offset': -18000, + 'profile': { + 'title': 'King of the Andals and the First Men', + 'phone': '+447123456789', + 'skype': 'bobbyb', + 'display_name': 'Robert Baratheon', + 'email': 'r.baratheon@me.com', + 'first_name': 'Robert', + 'last_name': 'Baratheon', + }, + 'is_admin': True, + 'is_owner': False, + 'is_primary_owner': False, + 'is_restricted': False, + 'is_ultra_restricted': False, + 'is_bot': False, + 'updated': 1643723400, + 'has_2fa': True +} + +USER_SUCCINCT_DICT = { + 'id': 'U1234567890', + 'name': 'Joe Bloggs' + } + +@pytest.fixture +def example_user(): + return 
user.create_from_dict(USER_DICT, verbose=True) + + +@pytest.fixture +def example_user_succinct(): + return user.create_from_dict(USER_DICT, verbose=False) + + +def test_user_initialisation(example_user): + + # Test that the User object is of the correct type + assert isinstance(example_user, user.User) + + # Test that the User object has the correct attributes + assert example_user.id == USER_DICT.get('id') + assert example_user.name == USER_DICT.get('name') + assert example_user.deleted == USER_DICT.get('deleted') + assert example_user.real_name == USER_DICT.get('real_name') + assert example_user.tz == USER_DICT.get('tz') + assert example_user.tz_label == USER_DICT.get('tz_label') + assert example_user.tz_offset == USER_DICT.get('tz_offset') + assert example_user.title == USER_DICT.get('profile').get('title') + assert example_user.phone == USER_DICT.get('profile').get('phone') + assert example_user.skype == USER_DICT.get('profile').get('skype') + assert example_user.display_name == USER_DICT.get('profile').get('display_name') + assert example_user.email == USER_DICT.get('profile').get('email') + assert example_user.first_name == USER_DICT.get('profile').get('first_name') + assert example_user.last_name == USER_DICT.get('profile').get('last_name') + assert example_user.is_admin == USER_DICT.get('is_admin') + assert example_user.is_owner == USER_DICT.get('is_owner') + assert example_user.is_primary_owner == USER_DICT.get('is_primary_owner') + assert example_user.is_restricted == USER_DICT.get('is_restricted') + assert example_user.is_ultra_restricted == USER_DICT.get('is_ultra_restricted') + assert example_user.is_bot == USER_DICT.get('is_bot') + assert example_user.updated == user.convert_timestamp(USER_DICT.get('updated')) + assert example_user.has_2fa == USER_DICT.get('has_2fa') + + +def test_user_succinct_initialisation(example_user_succinct): + + # Test that the UserSuccinct object is of the correct type + assert isinstance(example_user_succinct, user.UserSuccinct) + + # Test that the UserSuccinct object has the correct attributes + assert example_user_succinct.id == USER_DICT.get('id') + assert example_user_succinct.name == USER_DICT.get('name') + assert example_user_succinct.display_name == USER_DICT.get('profile').get('display_name') + assert example_user_succinct.has_2fa == USER_DICT.get('has_2fa') + assert example_user_succinct.is_admin == USER_DICT.get('is_admin') + assert example_user_succinct.email == USER_DICT.get('profile').get('email') + + +def test_user_creation_with_missing_fields(): + # Test that a User object is created + user_obj = user.create_from_dict(USER_SUCCINCT_DICT, verbose=True) + + # Test that the User object is of the correct type + assert isinstance(user_obj, user.User) + + # Test that the User object has the correct attributes + assert user_obj.id == USER_SUCCINCT_DICT.get('id') + assert user_obj.name == USER_SUCCINCT_DICT.get('name') + assert user_obj.deleted is None + assert user_obj.real_name is None + assert user_obj.tz is None + assert user_obj.tz_label is None + assert user_obj.tz_offset is None diff --git a/tests/unit/models/test_unit_workspace.py b/tests/unit/models/test_unit_workspace.py new file mode 100644 index 0000000..f22e114 --- /dev/null +++ b/tests/unit/models/test_unit_workspace.py @@ -0,0 +1,81 @@ +import pytest +from slack_watchman.models import workspace + +WORKSPACE_ONE_DICT = { + 'id': 'T1234567890', + 'name': 'Example Workspace', + 'domain': 'example.com', + 'url': 'https://example.com', + 'email_domain': 'example.com', + 
'is_verified': True, + 'discoverable': False, + 'enterprise_id': 'E1234567890', + 'enterprise_domain': 'example.enterprise.com', + 'enterprise_name': 'Example Enterprise', +} + +WORKSPACE_MISSING_FIELDS_DICT = { + 'id': 'T1234567890', + 'name': 'Example Workspace', +} + + +@pytest.fixture +def example_workspace_one(): + return workspace.create_from_dict(WORKSPACE_ONE_DICT) + + +@pytest.fixture +def example_workspace_missing_fields(): + return workspace.create_from_dict(WORKSPACE_MISSING_FIELDS_DICT) + + +def test_workspace_initialisation(example_workspace_one): + # Test that the Workspace object is of the correct type + assert isinstance(example_workspace_one, workspace.Workspace) + + # Test that the Workspace object has the correct attributes + assert example_workspace_one.id == WORKSPACE_ONE_DICT.get('id') + assert example_workspace_one.name == WORKSPACE_ONE_DICT.get('name') + assert example_workspace_one.domain == WORKSPACE_ONE_DICT.get('domain') + assert example_workspace_one.url == WORKSPACE_ONE_DICT.get('url') + assert example_workspace_one.email_domain == WORKSPACE_ONE_DICT.get('email_domain') + assert example_workspace_one.is_verified == WORKSPACE_ONE_DICT.get('is_verified') + assert example_workspace_one.discoverable == WORKSPACE_ONE_DICT.get('discoverable') + assert example_workspace_one.enterprise_id == WORKSPACE_ONE_DICT.get('enterprise_id') + assert example_workspace_one.enterprise_domain == WORKSPACE_ONE_DICT.get('enterprise_domain') + assert example_workspace_one.enterprise_name == WORKSPACE_ONE_DICT.get('enterprise_name') + + +def test_workspace_initialisation_with_missing_fields(example_workspace_missing_fields): + # Test that the Workspace object is of the correct type + assert isinstance(example_workspace_missing_fields, workspace.Workspace) + + # Test that the Workspace object has the correct attributes + assert example_workspace_missing_fields.id == WORKSPACE_MISSING_FIELDS_DICT.get('id') + assert example_workspace_missing_fields.name == WORKSPACE_MISSING_FIELDS_DICT.get('name') + assert example_workspace_missing_fields.domain is None + assert example_workspace_missing_fields.url is None + assert example_workspace_missing_fields.email_domain is None + assert example_workspace_missing_fields.is_verified is None + assert example_workspace_missing_fields.discoverable is None + assert example_workspace_missing_fields.enterprise_id is None + assert example_workspace_missing_fields.enterprise_domain is None + assert example_workspace_missing_fields.enterprise_name is None + + +def test_field_type(): + # Test that correct error is raised when name is not a string + workspace_dict = WORKSPACE_ONE_DICT + workspace_dict['name'] = 123 + with pytest.raises(TypeError): + test_workspace = workspace.create_from_dict(workspace_dict) + + +def test_default_values(example_workspace_missing_fields): + # Test that default fields are correctly applied + assert example_workspace_missing_fields.is_verified is None + assert example_workspace_missing_fields.discoverable is None + assert example_workspace_missing_fields.enterprise_id is None + assert example_workspace_missing_fields.enterprise_domain is None + assert example_workspace_missing_fields.enterprise_name is None From 493f9e2cda336668fd2c7c66957d59f9eebd9b0d Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 20:47:55 +0100 Subject: [PATCH 08/18] Added GitHub action to run tests --- .github/workflows/run_tests.yml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 
.github/workflows/run_tests.yml diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml new file mode 100644 index 0000000..2625b6c --- /dev/null +++ b/.github/workflows/run_tests.yml @@ -0,0 +1,32 @@ +name: Run Unit Test via Pytest + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Lint with Ruff + run: | + pip install ruff + ruff --format=github --target-version=py310 . + continue-on-error: true + - name: Test with pytest + run: | + coverage run -m pytest -v -s + - name: Generate Coverage Report + run: | + coverage report -m From 7a65d9f27702c043ac352b718f8103281306516c Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 21:10:01 +0100 Subject: [PATCH 09/18] Fix some linting issues --- src/slack_watchman/__init__.py | 14 +++++++------- tests/unit/models/test_unit_conversation.py | 6 ++++-- tests/unit/models/test_unit_post.py | 1 - 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/slack_watchman/__init__.py b/src/slack_watchman/__init__.py index 40613f6..08810d4 100644 --- a/src/slack_watchman/__init__.py +++ b/src/slack_watchman/__init__.py @@ -21,7 +21,7 @@ post, conversation ) -from slack_watchman.loggers import StdoutLogger, JSONLogger +from slack_watchman.loggers import StdoutLogger, JSONLogger, export_csv from slack_watchman.clients.slack_client import SlackClient OUTPUT_LOGGER: JSONLogger @@ -151,10 +151,10 @@ def unauthenticated_probe(workspace_domain: str, OUTPUT_LOGGER.log('SUCCESS', 'Slack Watchman started execution') OUTPUT_LOGGER.log('INFO', f'Version: {project_metadata.get("version")}') - OUTPUT_LOGGER.log('INFO', f'Created by: PaperMtn ') - OUTPUT_LOGGER.log('SUCCESS', f'Running in probe mode') - OUTPUT_LOGGER.log('SUCCESS', f'Slack Watchman will attempt an unauthenticated probe on the workspace ' - f'and return any available authentication information.') + OUTPUT_LOGGER.log('INFO', 'Created by: PaperMtn ') + OUTPUT_LOGGER.log('SUCCESS', 'Running in probe mode') + OUTPUT_LOGGER.log('SUCCESS', 'Slack Watchman will attempt an unauthenticated probe on the workspace ' + 'and return any available authentication information.') OUTPUT_LOGGER.log('SUCCESS', f'Workspace: {workspace_domain}') try: domain_information = watchman_processor.find_auth_information(workspace_domain) @@ -308,7 +308,7 @@ def main(): user_list = watchman_processor.get_users(slack_con, verbose) OUTPUT_LOGGER.log('SUCCESS', f'{len(user_list)} users discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - loggers.export_csv('slack_users', user_list) + export_csv('slack_users', user_list) OUTPUT_LOGGER.log( 'SUCCESS', f'Users output to CSV file: {os.path.join(os.getcwd(), "slack_users.csv")}') @@ -318,7 +318,7 @@ def main(): channel_list = watchman_processor.get_channels(slack_con, verbose) OUTPUT_LOGGER.log('SUCCESS', f'{len(channel_list)} channels discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - loggers.export_csv('slack_channels', channel_list) + export_csv('slack_channels', channel_list) OUTPUT_LOGGER.log( 'SUCCESS', f'Users output to CSV file: {os.path.join(os.getcwd(), "slack_channels.csv")}') diff --git a/tests/unit/models/test_unit_conversation.py 
b/tests/unit/models/test_unit_conversation.py index 6d4f8ad..9df72bd 100644 --- a/tests/unit/models/test_unit_conversation.py +++ b/tests/unit/models/test_unit_conversation.py @@ -119,8 +119,10 @@ def test_conversation_succinct_initialisation(example_conversation_succinct): assert example_conversation_succinct.is_mpim == CONVERSATION_SUCCINCT_DICT.get('is_mpim') assert example_conversation_succinct.is_archived == CONVERSATION_SUCCINCT_DICT.get('is_archived') assert example_conversation_succinct.creator == CONVERSATION_SUCCINCT_DICT.get('creator') - assert example_conversation_succinct.canvas_empty == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas').get('is_empty') - assert example_conversation_succinct.canvas_id == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas').get('file_id') + assert (example_conversation_succinct.canvas_empty == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas') + .get('is_empty')) + assert (example_conversation_succinct.canvas_id == CONVERSATION_SUCCINCT_DICT.get('properties').get('canvas') + .get('file_id')) def test_conversation_missing_fields(example_conversation_missing_fields): diff --git a/tests/unit/models/test_unit_post.py b/tests/unit/models/test_unit_post.py index 906c17a..77ce7ff 100644 --- a/tests/unit/models/test_unit_post.py +++ b/tests/unit/models/test_unit_post.py @@ -328,4 +328,3 @@ def test_file_field_type(): file_dict['id'] = 123 with pytest.raises(TypeError): post.create_file_from_dict(file_dict) - From 2fffed30c52768fe43c5c9e14b617f5fb415cad7 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 21:10:13 +0100 Subject: [PATCH 10/18] Adding test action --- .github/workflows/run_tests.yml | 17 +- poetry.lock | 394 ++++++++++++++++++++++++-------- pyproject.toml | 4 + 3 files changed, 315 insertions(+), 100 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 2625b6c..4dcfe08 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v3 @@ -17,15 +17,16 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with Ruff - run: | - pip install ruff - ruff --format=github --target-version=py310 . + python -m pip install --upgrade pip + pip install poetry + poetry install --with dev + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') continue-on-error: true - name: Test with pytest - run: | + run: | + pip install coverage coverage run -m pytest -v -s - name: Generate Coverage Report run: | diff --git a/poetry.lock b/poetry.lock index b1b005a..c1ff4e1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,19 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +[[package]] +name = "astroid" +version = "3.3.5" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.9.0" +files = [ + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -34,101 +48,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, 
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -142,6 +171,95 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.6.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = 
"coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -181,6 +299,31 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + [[package]] name = "packaging" version = "24.1" @@ -192,6 +335,22 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + [[package]] name = "pluggy" version = "1.5.0" @@ -207,6 +366,35 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pylint" +version = "3.3.1" +description = "python code static checker" +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, +] + +[package.dependencies] +astroid = ">=3.3.4,<=3.4.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pytest" version = "8.3.3" @@ -334,6 +522,28 @@ files = [ {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "urllib3" version = "2.2.3" @@ -354,4 +564,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">=3.10" -content-hash = "c411ec8eea25560062fa813de399c0540fe6ce92a734d89ce08cf495e1d45823" +content-hash = "07d65388500a1db45fb853861f95f32fc28546acc17af1a2c294d1c5a7efdf44" diff --git a/pyproject.toml b/pyproject.toml index 49851eb..55eb119 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,11 @@ colorama = "^0.4.6" pyyaml = "^6.0.2" requests = "^2.32.3" beautifulsoup4 = "^4.12.3" + +[tool.poetry.group.dev.dependencies] pytest = "^8.3.3" +coverage = "^7.2.3" +pylint = "^3.3.1" [tool.poetry.scripts] slack-watchman = "slack_watchman:main" From 
91ee2949a0852fac2d5434d83c27dda888537d5c Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 21:13:40 +0100 Subject: [PATCH 11/18] Adding test action --- .github/workflows/run_tests.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 4dcfe08..8b04127 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -22,12 +22,11 @@ jobs: poetry install --with dev - name: Analysing the code with pylint run: | - pylint $(git ls-files '*.py') + poetry run pylint $(git ls-files '*.py') continue-on-error: true - name: Test with pytest run: | - pip install coverage - coverage run -m pytest -v -s + poetry run coverage run -m pytest -v -s - name: Generate Coverage Report run: | - coverage report -m + poetry run coverage report -m From 82b1bf17f6a5631495cd5140a9530feb94719200 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Fri, 25 Oct 2024 22:24:54 +0100 Subject: [PATCH 12/18] Update date conversion --- src/slack_watchman/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/slack_watchman/utils.py b/src/slack_watchman/utils.py index 7a07b39..0c4e09c 100644 --- a/src/slack_watchman/utils.py +++ b/src/slack_watchman/utils.py @@ -1,4 +1,4 @@ -import time +from datetime import datetime, timezone import json import dataclasses from typing import List, Dict, Any @@ -23,8 +23,8 @@ def convert_timestamp(timestamp: str or int) -> str or None: if timestamp: if isinstance(timestamp, str): timestamp = timestamp.split('.', 1)[0] - - return time.strftime('%Y-%m-%d %H:%M:%S %Z', time.gmtime(int(timestamp))) + dt = datetime.fromtimestamp(float(timestamp), timezone.utc) + return dt.strftime('%Y-%m-%d %H:%M:%S %Z') else: return None From 8f8f2ffc3cb4d900db27cf2073c8f7613a72ab7f Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 17:04:53 +0000 Subject: [PATCH 13/18] Updates to GitHub Actions --- .github/workflows/docker_build_test.yml | 29 +++++++++++++ .../{dockerpublish.yml => docker_publish.yml} | 2 +- .github/workflows/python_package.yml | 34 +++++++++++++++ .../{pythonpublish.yml => python_publish.yml} | 1 - .../{run_tests.yml => python_run_tests.yml} | 3 +- .github/workflows/pythonpackage.yml | 43 ------------------- 6 files changed, 66 insertions(+), 46 deletions(-) create mode 100644 .github/workflows/docker_build_test.yml rename .github/workflows/{dockerpublish.yml => docker_publish.yml} (96%) create mode 100644 .github/workflows/python_package.yml rename .github/workflows/{pythonpublish.yml => python_publish.yml} (99%) rename .github/workflows/{run_tests.yml => python_run_tests.yml} (98%) delete mode 100644 .github/workflows/pythonpackage.yml diff --git a/.github/workflows/docker_build_test.yml b/.github/workflows/docker_build_test.yml new file mode 100644 index 0000000..718804e --- /dev/null +++ b/.github/workflows/docker_build_test.yml @@ -0,0 +1,29 @@ +name: Build and Test Docker Image + +on: + push: + +env: + TEST_TAG: papermountain/slack-watchman:test + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build + uses: docker/build-push-action@v6 + with: + load: true + tags: myimage:latest + + - name: Inspect + run: | + docker image inspect myimage:latest + + - name: Test + run: | + docker run --rm ${{ env.TEST_TAG }} --version + docker run --rm ${{ env.TEST_TAG }} --help \ No newline at end of file diff --git a/.github/workflows/dockerpublish.yml 
b/.github/workflows/docker_publish.yml similarity index 96% rename from .github/workflows/dockerpublish.yml rename to .github/workflows/docker_publish.yml index 563e8aa..c2a5f31 100644 --- a/.github/workflows/dockerpublish.yml +++ b/.github/workflows/docker_publish.yml @@ -1,4 +1,4 @@ -name: ci +name: Publish Docker Image on: push: diff --git a/.github/workflows/python_package.yml b/.github/workflows/python_package.yml new file mode 100644 index 0000000..652daf2 --- /dev/null +++ b/.github/workflows/python_package.yml @@ -0,0 +1,34 @@ +name: Test Python Package + +on: + push: + branches: [ develop, feature/**, release/**, hotfix/** ] + pull_request: + branches: [ develop, feature/**, release/**, hotfix/** ] + +jobs: + build-ubuntu: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + pip install poetry + poetry install + - name: Test setup & install + run: | + poetry build + python3 -m pip install dist/*.whl + - name: Test run + run: | + slack-watchman --version + slack-watchman --help \ No newline at end of file diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/python_publish.yml similarity index 99% rename from .github/workflows/pythonpublish.yml rename to .github/workflows/python_publish.yml index 726e866..ee88dff 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/python_publish.yml @@ -1,4 +1,3 @@ - name: Poetry Publish on: diff --git a/.github/workflows/run_tests.yml b/.github/workflows/python_run_tests.yml similarity index 98% rename from .github/workflows/run_tests.yml rename to .github/workflows/python_run_tests.yml index 8b04127..3a040a5 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/python_run_tests.yml @@ -1,6 +1,7 @@ name: Run Unit Test via Pytest -on: [push] +on: + push: jobs: build: diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml deleted file mode 100644 index 9cdac40..0000000 --- a/.github/workflows/pythonpackage.yml +++ /dev/null @@ -1,43 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ develop, feature/**, release/** ] - pull_request: - branches: [ develop, feature/**, release/** ] - -jobs: - build-ubuntu: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.12'] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install flake8 poetry - poetry install - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test setup & install - run: | - poetry build - python3 -m pip install dist/*.whl - - name: Test run - run: | - slack-watchman --version - slack-watchman --help \ No newline at end of file From 57916e60a9f53c0da8d03de25db582f978296135 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 17:10:29 +0000 Subject: [PATCH 14/18] Updates to GitHub Actions --- .github/workflows/docker_build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker_build_test.yml b/.github/workflows/docker_build_test.yml index 718804e..86b5367 100644 --- a/.github/workflows/docker_build_test.yml +++ b/.github/workflows/docker_build_test.yml @@ -17,7 +17,7 @@ jobs: uses: docker/build-push-action@v6 with: load: true - tags: myimage:latest + tags: ${{ env.TEST_TAG }} - name: Inspect run: | From ba42e10747a3a8fd07e58464301a434508303975 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 17:18:41 +0000 Subject: [PATCH 15/18] Updates to GitHub Actions --- .github/workflows/docker_build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker_build_test.yml b/.github/workflows/docker_build_test.yml index 86b5367..db5738c 100644 --- a/.github/workflows/docker_build_test.yml +++ b/.github/workflows/docker_build_test.yml @@ -21,7 +21,7 @@ jobs: - name: Inspect run: | - docker image inspect myimage:latest + docker image inspect ${{ env.TEST_TAG }} - name: Test run: | From 6e833bb4674eda01f034a2016f8cc5c0f5a64911 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 18:57:47 +0000 Subject: [PATCH 16/18] Remove commented code --- Dockerfile | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/Dockerfile b/Dockerfile index 383733c..dc7cc02 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,22 +1,3 @@ -# syntax=docker/dockerfile:1 - -#FROM python:3.12-slim-bullseye -#WORKDIR /opt/slack-watchman -#COPY . /opt/slack-watchman -#RUN pip install poetry -#ENV PYTHONPATH=/opt/slack-watchman \ -# SLACK_WATCHMAN_TOKEN="" \ -# SLACK_WATCHMAN_COOKIE="" \ -# SLACK_WATCHMAN_URL="" -#RUN poetry config virtualenvs.create false && \ -# poetry install --no-dev && \ -# chmod -R 700 . && \ -# poetry build && \ -# pip install dist/*.whl -#STOPSIGNAL SIGINT -#WORKDIR /opt/slack-watchman -#ENTRYPOINT ["slack-watchman"] - # syntax=docker/dockerfile:1 FROM python:3.12-slim-bullseye AS builder WORKDIR /opt/slack-watchman From 99217576207776633fe3cae4b7513fb251626bd4 Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 20:35:20 +0000 Subject: [PATCH 17/18] Version bump --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef99890..2e1c4a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,17 @@ ## [4.3.0] - 2024-10-x ### Changed - Timestamps are now in UTC across all logging for consistency +- Refactor some commonly used functions into a utils module +- More general code cleanup and refactoring ### Fixed - Fixed a few bugs with models for User, Workspace and Messages not picking up all values +### Added +- GitHub actions for Python tests and Docker build and run testing +- Implemented unit tests for models + + ## [4.2.0] - 2024-09-27 ### Added - Added enumeration of conversations with populated Canvases attached. These can contain sensitive information, and are worth reviewing. 
diff --git a/pyproject.toml b/pyproject.toml index 55eb119..59d674a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "slack-watchman" -version = "4.2.0" +version = "4.3.0" description = "Monitoring and enumerating Slack for exposed secrets" authors = ["PaperMtn "] license = "GPL-3.0" From d29c65b1949072ee5899cd3e0f76dcc5349dd2fd Mon Sep 17 00:00:00 2001 From: PaperMtn Date: Sun, 27 Oct 2024 20:36:10 +0000 Subject: [PATCH 18/18] Version bump --- CHANGELOG.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e1c4a5..68df5eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## [4.3.0] - 2024-10-x +## [4.3.0] - 2024-10-27 ### Changed - Timestamps are now in UTC across all logging for consistency - Refactor some commonly used functions into a utils module @@ -11,7 +11,6 @@ - GitHub actions for Python tests and Docker build and run testing - Implemented unit tests for models - ## [4.2.0] - 2024-09-27 ### Added - Added enumeration of conversations with populated Canvases attached. These can contain sensitive information, and are worth reviewing.
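As a closing illustration of how the test tooling introduced in PATCH 10/18 fits together (pytest, coverage and pylint in the poetry dev group, exercised by python_run_tests.yml), a unit test for the timestamp helper sketched above might look like the following. The file name and parametrisation are assumptions for demonstration only, not tests added by this patch series:

    # hypothetical file: tests/unit/test_unit_utils.py
    import pytest
    from slack_watchman.utils import convert_timestamp

    @pytest.mark.parametrize('raw, expected', [
        ('1729900000.000200', '2024-10-25 23:46:40 UTC'),
        (1729900000, '2024-10-25 23:46:40 UTC'),
        (None, None),
    ])
    def test_convert_timestamp_is_utc(raw, expected):
        assert convert_timestamp(raw) == expected

Such a case would run locally with the same commands the updated workflow uses: poetry install --with dev, then poetry run coverage run -m pytest -v -s, followed by poetry run coverage report -m.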