diff --git a/.gitignore b/.gitignore
index c2f4a25..933a27b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,5 @@ CustomerClones/
 **cache**
 **.DS_Store
 !**cached_llm.py
-quorum_project/
\ No newline at end of file
+quorum_project/
+dist/
\ No newline at end of file
diff --git a/Quorum/apis/block_explorers/source_code.py b/Quorum/apis/block_explorers/source_code.py
index 65864a2..8748f98 100644
--- a/Quorum/apis/block_explorers/source_code.py
+++ b/Quorum/apis/block_explorers/source_code.py
@@ -47,7 +47,7 @@ def _parse_source_code(self) -> None:
 
             self._parsed_contract = contract_ast[contract_name]['ast']
         except Exception as e:
-            pp.pretty_print(f"Error parsing source code for {self.file_name}: {e}\n"
+            pp.pprint(f"Error parsing source code for {self.file_name}: {e}\n"
                             f"Some of the checks will not apply to this contract!!!",
                             pp.Colors.FAILURE)
         finally:
diff --git a/Quorum/apis/git_api/git_manager.py b/Quorum/apis/git_api/git_manager.py
index 34533bc..9e21d61 100644
--- a/Quorum/apis/git_api/git_manager.py
+++ b/Quorum/apis/git_api/git_manager.py
@@ -52,12 +52,12 @@ def _load_repos_from_file(self, gt_config: dict[str, any]) -> tuple[dict[str, st
     def __clone_or_update_for_repo(repo_name: str, repo_url: str, to_path: Path):
         repo_path = to_path / repo_name
         if repo_path.exists():
-            pp.pretty_print(f"Repository {repo_name} already exists at {repo_path}. Updating repo and submodules.", pp.Colors.INFO)
+            pp.pprint(f"Repository {repo_name} already exists at {repo_path}. Updating repo and submodules.", pp.Colors.INFO)
             repo = Repo(repo_path)
             repo.git.pull()
             repo.git.submodule('update', '--init', '--recursive')
         else:
-            pp.pretty_print(f"Cloning {repo_name} from URL: {repo_url} to {repo_path}...", pp.Colors.INFO)
+            pp.pprint(f'Cloning {repo_name} from URL: {repo_url} to {repo_path}...', pp.Colors.INFO)
             Repo.clone_from(repo_url, repo_path, multi_options=["--recurse-submodules"])
 
@@ -68,7 +68,7 @@ def clone_or_update(self) -> None:
         If the repository already exists locally, it will update the repository and its submodules.
         Otherwise, it will clone the repository and initialize submodules.
         """
-
+        pp.pprint('Cloning and updating preliminaries', pp.Colors.INFO, pp.Heading.HEADING_2)
         for repo_name, repo_url in self.repos.items():
             GitManager.__clone_or_update_for_repo(repo_name, repo_url, self.modules_path)
diff --git a/Quorum/apis/governance/data_models.py b/Quorum/apis/governance/data_models.py
index 8fb16de..b9ec13a 100644
--- a/Quorum/apis/governance/data_models.py
+++ b/Quorum/apis/governance/data_models.py
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Optional
 
 from pydantic import BaseModel, Field
 from Quorum.utils.chain_enum import Chain
@@ -49,9 +49,9 @@ class BGDProposalData(BaseModel):
     """
     ipfs: Optional[IPFSData] = None
     proposal: Optional[ProposalData] = None
-    events: List[EventData] = Field(default_factory=list)
+    events: list[EventData] = Field(default_factory=list)
 
 
 class PayloadAddresses(BaseModel):
     chain: Chain
-    addresses: List[str]
+    addresses: list[str]
diff --git a/Quorum/checks/diff.py b/Quorum/checks/diff.py
index 555408b..2115d51 100644
--- a/Quorum/checks/diff.py
+++ b/Quorum/checks/diff.py
@@ -123,21 +123,29 @@ def __print_diffs_results(self, missing_files: list[SourceCode], files_with_diff
             missing_files (list[SourceCode]): A list of SourceCode objects representing missing files.
             files_with_diffs (list[Compared]): A list of Compared objects representing files with differences.
         """
-        total_number_of_files = len(self.source_codes)
-        number_of_missing_files = len(missing_files)
-        number_of_files_with_diffs = len(files_with_diffs)
-
-        msg = f"Compared {total_number_of_files - number_of_missing_files}/{total_number_of_files} files for proposal {self.proposal_address}"
-        if number_of_missing_files == 0:
-            pp.pretty_print(msg, pp.Colors.SUCCESS)
-        else:
-            pp.pretty_print(msg, pp.Colors.WARNING)
-            for source_code in missing_files:
-                pp.pretty_print(f"Missing file: {source_code.file_name} in local repo", pp.Colors.WARNING)
-
-        if number_of_files_with_diffs == 0:
-            pp.pretty_print("No differences found.", pp.Colors.SUCCESS)
-        else:
-            pp.pretty_print(f"Found differences in {number_of_files_with_diffs} files", pp.Colors.FAILURE)
-            for compared_pair in files_with_diffs:
-                pp.pretty_print(f"Local: {compared_pair.local_file}\nProposal: {compared_pair.proposal_file}\nDiff: {compared_pair.diff}", pp.Colors.FAILURE)
+        num_total_files = len(self.source_codes)
+        num_missing_files = len(missing_files)
+        num_diff_files = len(files_with_diffs)
+        num_identical = num_total_files - num_missing_files - num_diff_files
+
+        # Identical files message.
+        pp.pprint(f'Files found identical: {num_identical}/{num_total_files}\n', pp.Colors.SUCCESS)
+
+        # Diffs files message.
+        if num_diff_files > 0:
+            diffs_msg = ('Proposal files found to deviate from their source of truth counterpart: '
+                         f'{num_diff_files}/{num_total_files}\n')
+            for i, compared_pair in enumerate(files_with_diffs, 1):
+                diffs_msg += (f'\t{i}. Proposal file: {compared_pair.proposal_file}\n'
+                              f'\t   Source of truth file: {compared_pair.local_file}\n'
+                              f'\t   Diff can be found here: {compared_pair.diff}\n')
+            pp.pprint(diffs_msg, pp.Colors.FAILURE)
+
+        # Missing files message.
+        if num_missing_files > 0:
+            missing_msg = ('Proposal files missing from source of truth: '
+                           f'{num_missing_files}/{num_total_files}\n')
+            for i, source_code in enumerate(missing_files, 1):
+                missing_msg += f'\t{i}. File: {source_code.file_name}\n'
+            pp.pprint(missing_msg, pp.Colors.WARNING)
+
diff --git a/Quorum/checks/global_variables.py b/Quorum/checks/global_variables.py
index df7db06..187f489 100644
--- a/Quorum/checks/global_variables.py
+++ b/Quorum/checks/global_variables.py
@@ -1,7 +1,5 @@
-import re
 from pathlib import Path
-
 from Quorum.checks.check import Check
 from Quorum.apis.block_explorers.source_code import SourceCode
 import Quorum.utils.pretty_printer as pp
 
@@ -75,11 +73,16 @@ def __process_results(self, source_code_to_violated_variables: dict[str, list[di
             to lists of violated variables.
         """
         if not source_code_to_violated_variables:
-            pp.pretty_print("All global variables are constant or immutable.", pp.Colors.SUCCESS)
-        else:
-            pp.pretty_print("Global variable checks failed:", pp.Colors.FAILURE)
-            for file_name, violated_variables in source_code_to_violated_variables.items():
-                pp.pretty_print(f"File {file_name} contains variables that are not constant or immutable"
-                                ,pp.Colors.FAILURE)
-                self._write_to_file(Path(file_name).stem.removesuffix(".sol"), violated_variables)
+            pp.pprint('All global variables are constant or immutable.', pp.Colors.SUCCESS)
+            return
+
+        msg = ("Some global variables aren't constant or immutable. A storage collision may occur!\n"
+               f'The following variables found to be storage variables: ')
+        i = 1
+        for file_name, violated_variables in source_code_to_violated_variables.items():
+            for var in violated_variables:
+                msg += f"\t{i}. File {file_name}: {var['name']}"
+                i += 1
+            self._write_to_file(Path(file_name).stem.removesuffix('.sol'), violated_variables)
+        pp.pprint(msg, pp.Colors.FAILURE)
\ No newline at end of file
diff --git a/Quorum/checks/new_listing.py b/Quorum/checks/new_listing.py
index 023ec60..faa7e8a 100644
--- a/Quorum/checks/new_listing.py
+++ b/Quorum/checks/new_listing.py
@@ -5,7 +5,6 @@
 
 
 class NewListingCheck(Check):
-
     def new_listing_check(self) -> None:
         """
         Checks if the proposal address is a new listing on the blockchain.
@@ -14,35 +13,41 @@ def new_listing_check(self) -> None:
             no new listings were found.
         """
         functions = self._get_functions_from_source_codes()
-        if functions.get("newListings", functions.get("newListingsCustom")):
-            pp.pretty_print(f"New listings detected for {self.proposal_address}", pp.Colors.WARNING)
+        if functions.get('newListings', functions.get('newListingsCustom')):
+            pp.pprint(f'New listings detected for payload {self.proposal_address}', pp.Colors.WARNING)
 
-            # Check if Anthropic API key is configured
-            if not config.ANTHROPIC_API_KEY:
-                pp.pretty_print(
-                    "First deposit check is skipped. If you have a LLM API key, you can add it to your environment variables to enable this check",
+            proposal_code = self.source_codes[0].file_content
+            proposal_code_str = '\n'.join(proposal_code)
+            try:
+                listings: ListingArray | None = FirstDepositChain().execute(proposal_code_str)
+            except:
+                pp.pprint(
+                    'New listings were detected in payload but first deposit check is skipped.\n'
+                    'If you have a LLM API key, you can add it to your environment variables to enable this check',
                     pp.Colors.WARNING
                 )
                 return
 
-            proposal_code = self.source_codes[0].file_content
-            proposal_code_str = '\n'.join(proposal_code)
-            listings: ListingArray | None = FirstDepositChain().execute(proposal_code_str)
             if listings is None:
-                pp.pretty_print(f"Failed to retrieve new listings for {self.proposal_address}", pp.Colors.FAILURE)
+                pp.pprint('New listings were detected in payload but LLM failed to retrieve them.',
+                          pp.Colors.FAILURE)
                 return
 
-            for listing in listings.listings:
-                if listing.approve_indicator and listing.supply_indicator:
-                    pp.pretty_print(
-                        f"New listing detected for {listing}", pp.Colors.SUCCESS
-                    )
-                else:
-                    pp.pretty_print(f"New listing detected for {listing.asset_symbol} but no approval or supply detected", pp.Colors.FAILURE)
-            self._write_to_file("new_listings.json", listings.model_dump())
-
+
+            pp.pprint(f'{len(listings.listings)} new asset listings were detected:', pp.Colors.INFO)
+            for i, listing in enumerate(listings.listings, 1):
+                pp.pprint(f'\t{i}. Variable: {listing.asset_symbol}\n'
+                          f'\t   Asset address: {listing.asset_address}\n'
+                          f'\t   Approve indicator: {listing.approve_indicator}\n'
+                          f'\t   Supply seed amount: {listing.supply_seed_amount}\n'
+                          f'\t   Supply indicator: {listing.supply_indicator}',
+                          (pp.Colors.SUCCESS if listing.approve_indicator and listing.supply_indicator
+                           else pp.Colors.FAILURE))
+
+            self._write_to_file('new_listings.json', listings.model_dump())
+
         else:
-            pp.pretty_print(f"No new listings detected for {self.proposal_address}", pp.Colors.INFO)
-
+            pp.pprint(f'No new listings detected for {self.proposal_address}', pp.Colors.INFO)
+
     def _get_functions_from_source_codes(self) -> dict:
         """
         Retrieves functions from the source codes.
diff --git a/Quorum/checks/price_feed.py b/Quorum/checks/price_feed.py
index f9a79a8..d00b58f 100644
--- a/Quorum/checks/price_feed.py
+++ b/Quorum/checks/price_feed.py
@@ -1,51 +1,27 @@
 from pathlib import Path
 import re
+from dataclasses import dataclass
 
-from Quorum.apis.price_feeds import PriceFeedProviderBase
+from Quorum.apis.price_feeds import PriceFeedProviderBase, PriceFeedData
 from Quorum.utils.chain_enum import Chain
 from Quorum.checks.check import Check
 from Quorum.apis.block_explorers.source_code import SourceCode
 import Quorum.utils.pretty_printer as pp
 
 
-def remove_solidity_comments(source_code: str) -> str:
-    """
-    Removes single-line and multi-line comments from Solidity source code.
-
-    Args:
-        source_code (str): The Solidity source code as a single string.
-
-    Returns:
-        str: The source code with comments removed.
-    """
-    # Regex pattern to match single-line comments (//...)
-    single_line_comment_pattern = r'//.*?$'
-
-    # Regex pattern to match multi-line comments (/*...*/)
-    multi_line_comment_pattern = r'/\*.*?\*/'
-
-    # First, remove multi-line comments
-    source_code = re.sub(multi_line_comment_pattern, '', source_code, flags=re.DOTALL)
-
-    # Then, remove single-line comments
-    source_code = re.sub(single_line_comment_pattern, '', source_code, flags=re.MULTILINE)
-
-    return source_code
-
-
 class PriceFeedCheck(Check):
     """
     The PriceFeedCheck class is responsible for verifying the price feed addresses in the source code
     against official Chainlink or Chronical data.
     """
-
     def __init__(
             self,
             customer: str,
             chain: Chain,
             proposal_address: str,
             source_codes: list[SourceCode],
-            providers: list[PriceFeedProviderBase]
+            price_feed_providers: list[PriceFeedProviderBase],
+            token_providers: list[PriceFeedProviderBase]
     ) -> None:
         """
         Initializes the PriceFeedCheck object with customer information, proposal address,
@@ -60,52 +36,49 @@ def __init__(
         """
         super().__init__(customer, chain, proposal_address, source_codes)
         self.address_pattern = r'0x[a-fA-F0-9]{40}'
-        self.providers = providers
+        self.price_feed_providers = price_feed_providers
+        self.token_providers = token_providers
+
+    @dataclass
+    class PriceFeedResult:
+        '''
+        This dataclass helps organize the results of the check for printing them to the user.
+        '''
+        address: str
+        found_on: str
+        price_feed: PriceFeedData
 
-    def __check_price_feed_address(self, address: str, file_name: str) -> dict | None:
+        def __hash__(self):
+            return hash(self.address)
+
+    def __check_address(self, address: str, providers: list[PriceFeedProviderBase]) -> PriceFeedResult | None:
         """
         Check if the given address is present in the price feed providers.
 
         Args:
             address (str): The address to be checked.
-            file_name (str): The name of the source code file where the address was found.
+            providers (list[PriceFeedProviderBase]): The list of price feed providers to check against.
 
         Returns:
-            dict | None: The price feed data if the address is found, otherwise None.
+            PriceFeedResult | None: The price feed data if the address is found, otherwise None.
         """
-        for provider in self.providers:
+        for provider in providers:
             if (price_feed := provider.get_price_feed(self.chain, address)):
-
-                color = pp.Colors.SUCCESS
-                message = f"Found {address} on {provider.get_name()}\n"
-                message += str(price_feed)
-                if price_feed.proxy_address and price_feed.proxy_address.lower() != address.lower():
-                    message += f"Proxy address: {price_feed.proxy_address}\n"
-                if address.lower() != price_feed.address.lower():
-                    color = pp.Colors.FAILURE
-                    message += f"This is an implementation contract with a proxy address\n"
-                    message += f"Origin Address: {price_feed.address}\n"
-
-                pp.pretty_print(
-                    message,
-                    color
-                )
-                return price_feed.model_dump()
-
-        pp.pretty_print(
-            f"Address {address} not found in any address validation provider: {[p.get_name() for p in self.providers]}",
-            pp.Colors.INFO
-        )
+                return PriceFeedCheck.PriceFeedResult(address, provider.get_name(), price_feed)
         return None
 
     def verify_price_feed(self) -> None:
         """
-        Verifies the price feed addresses in the source code against official Chainlink or Chronical data.
+        Verifies the price feed addresses in the source code against official Chainlink or Chronicle data.
 
         This method iterates through each source code file to find and verify the address variables
-        against the official Chainlink and Chronical price feeds. It categorizes the addresses into
+        against the official Chainlink and Chronicle price feeds. It categorizes the addresses into
         verified and violated based on whether they are found in the official source.
         """
+        verified_price_feeds: set[PriceFeedCheck.PriceFeedResult] = set()
+        verified_tokens: set[PriceFeedCheck.PriceFeedResult] = set()
+        unverified_addresses: set[str] = set()
+
         # Iterate through each source code file to find and verify address variables
         for source_code in self.source_codes:
             verified_sources_path = f"{Path(source_code.file_name).stem.removesuffix('.sol')}/verified_sources.json"
@@ -115,14 +88,76 @@ def verify_price_feed(self) -> None:
             contract_text = '\n'.join(source_code.file_content)
 
             # Remove comments from the source code
-            clean_text = remove_solidity_comments(contract_text)
+            clean_text = PriceFeedCheck.remove_solidity_comments(contract_text)
 
             # Extract unique addresses using regex
             addresses = set(re.findall(self.address_pattern, clean_text))
-
+
             for address in addresses:
-                if feed := self.__check_price_feed_address(address, source_code.file_name):
-                    verified_variables.append(feed)
-
+                if res := self.__check_address(address, self.price_feed_providers):
+                    verified_variables.append(res.price_feed.model_dump())
+                    verified_price_feeds.add(res)
+                elif res := self.__check_address(address, self.token_providers):
+                    verified_variables.append(res.price_feed.model_dump())
+                    verified_tokens.add(res)
+                else:
+                    unverified_addresses.add(address)
+
             if verified_variables:
                 self._write_to_file(verified_sources_path, verified_variables)
+
+        num_addresses = len(verified_price_feeds) + len(verified_tokens) + len(unverified_addresses)
+        pp.pprint(f'{num_addresses} addresses identified in the payload.\n', pp.Colors.INFO)
+
+        # Print price feed validation
+        pp.pprint('Price Feed Validation', pp.Colors.INFO, pp.Heading.HEADING_3)
+        msg = (f'{len(verified_price_feeds)}/{num_addresses} '
+               'were identified as price feeds of the configured providers:\n')
+        for i, var_res in enumerate(verified_price_feeds, 1):
+            msg += (f'\t{i}. {var_res.address} found on {var_res.found_on}\n'
+                    f'\t   Name: {var_res.price_feed.name}\n'
+                    f'\t   Decimals: {var_res.price_feed.decimals}\n')
+        pp.pprint(msg, pp.Colors.SUCCESS)
+
+        # Print token validation
+        pp.pprint('Token Validation', pp.Colors.INFO, pp.Heading.HEADING_3)
+        msg = (f'{len(verified_tokens)}/{num_addresses} '
+               'were identified as tokens of the configured providers:\n')
+        for i, var_res in enumerate(verified_tokens, 1):
+            msg += (f'\t{i}. {var_res.address} found on {var_res.found_on}\n'
+                    f'\t   Name: {var_res.price_feed.name}\n'
+                    f'\t   Symbol: {var_res.price_feed.pair}\n'
+                    f'\t   Decimals: {var_res.price_feed.decimals}\n')
+        pp.pprint(msg, pp.Colors.SUCCESS)
+
+        # Print not found
+        msg = (f'{len(unverified_addresses)}/{num_addresses} '
+               'explicit addresses were not identified using any provider:\n')
+        for i, address in enumerate(unverified_addresses, 1):
+            msg += f'\t{i}. {address}\n'
+        pp.pprint(msg, pp.Colors.FAILURE)
+
+    @staticmethod
+    def remove_solidity_comments(source_code: str) -> str:
+        """
+        Removes single-line and multi-line comments from Solidity source code.
+
+        Args:
+            source_code (str): The Solidity source code as a single string.
+
+        Returns:
+            str: The source code with comments removed.
+        """
+        # Regex pattern to match single-line comments (//...)
+        single_line_comment_pattern = r'//.*?$'
+
+        # Regex pattern to match multi-line comments (/*...*/)
+        multi_line_comment_pattern = r'/\*.*?\*/'
+
+        # First, remove multi-line comments
+        source_code = re.sub(multi_line_comment_pattern, '', source_code, flags=re.DOTALL)
+
+        # Then, remove single-line comments
+        source_code = re.sub(single_line_comment_pattern, '', source_code, flags=re.MULTILINE)
+
+        return source_code
diff --git a/Quorum/checks/proposal_check.py b/Quorum/checks/proposal_check.py
index ba5feec..298b556 100644
--- a/Quorum/checks/proposal_check.py
+++ b/Quorum/checks/proposal_check.py
@@ -14,6 +14,16 @@ class CustomerConfig(BaseModel):
     customer: str
     payload_addresses: list[PayloadAddresses]
 
+    def __str__(self):
+        s = f'Customer: {self.customer}\nChains and payloads:\n'
+        for pa in self.payload_addresses:
+            if len(pa.addresses) == 0:
+                continue
+            s += f'* {pa.chain}:\n'
+            for address in pa.addresses:
+                s += f'\t- {address}\n'
+        return s
+
 
 class ProposalConfig(BaseModel):
     customers_config: list[CustomerConfig]
@@ -38,24 +48,30 @@ def run_customer_proposal_validation(prop_config: ProposalConfig) -> None:
         >>> run_batch(prop_config)
     """
     for config in prop_config.customers_config:
+        pp.pprint('Run Preparation', pp.Colors.INFO, pp.Heading.HEADING_1)
         ground_truth_config = ConfigLoader.load_customer_config(config.customer)
-
         git_manager = GitManager(config.customer, ground_truth_config)
         git_manager.clone_or_update()
-
         price_feed_providers = ground_truth_config.get("price_feed_providers", [])
+        token_providers = ground_truth_config.get("token_validation_providers", [])
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
+
+        pp.pprint('Run Metadata', pp.Colors.INFO, pp.Heading.HEADING_2)
+        pp.pprint(str(config), pp.Colors.INFO)
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
+
         for pa in config.payload_addresses:
             proposals_check(
                 customer=config.customer,
                 chain=pa.chain,
                 proposal_addresses=pa.addresses,
-                providers=price_feed_providers
+                price_feed_providers=price_feed_providers,
+                token_providers=token_providers
             )
 
-
-def proposals_check(customer: str, chain: Chain, proposal_addresses: list[str], providers: list[PriceFeedProviderBase]) -> None:
+def proposals_check(customer: str, chain: Chain, proposal_addresses: list[str],
+                    price_feed_providers: list[PriceFeedProviderBase], token_providers: list[PriceFeedProviderBase] = None) -> None:
     """
     Check and compare source code files for given proposals.
@@ -69,34 +85,45 @@ def proposals_check(customer: str, chain: Chain, proposal_addresses: list[str],
     """
     api = ChainAPI(chain)
 
-    pp.pretty_print(f"Processing customer {customer}, for chain: {chain}", pp.Colors.INFO)
     for proposal_address in proposal_addresses:
-        pp.pretty_print(f"Processing proposal {proposal_address}", pp.Colors.INFO)
+        pp.pprint(f'Analyzing payload {proposal_address} on {chain}', pp.Colors.INFO, pp.Heading.HEADING_1)
 
         try:
             source_codes = api.get_source_code(proposal_address)
-        except ValueError as e:
+        except ValueError:
             error_message = (
                 f"Payload address {proposal_address} is not verified on {chain.name} explorer.\n"
                 "We do not recommend to approve this proposal until the code is approved!\n"
                 "Try contacting the proposer and ask them to verify the contract.\n"
                 "No further checks are being performed on this payload."
            )
-            pp.pretty_print(error_message, pp.Colors.FAILURE)
+            pp.pprint(error_message, pp.Colors.FAILURE)
             # Skip further checks for this proposal
             continue
 
         # Diff check
+        pp.pprint('Check 1 - Comparing payload contract and imports with the source of truth',
+                  pp.Colors.INFO, pp.Heading.HEADING_2)
         missing_files = Checks.DiffCheck(customer, chain, proposal_address, source_codes).find_diffs()
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
 
         # Review diff check
+        pp.pprint(f'Check 2 - Verifying missing files against customer review repo',
+                  pp.Colors.INFO, pp.Heading.HEADING_2)
         Checks.ReviewDiffCheck(customer, chain, proposal_address, missing_files).find_diffs()
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
 
         # Global variables check
+        pp.pprint('Check 3 - Global variables', pp.Colors.INFO, pp.Heading.HEADING_2)
         Checks.GlobalVariableCheck(customer, chain, proposal_address, missing_files).check_global_variables()
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
 
         # Feed price check
-        Checks.PriceFeedCheck(customer, chain, proposal_address, missing_files, providers).verify_price_feed()
-
+        pp.pprint('Check 4 - Explicit addresses validation', pp.Colors.INFO, pp.Heading.HEADING_2)
+        Checks.PriceFeedCheck(customer, chain, proposal_address, missing_files, price_feed_providers, token_providers).verify_price_feed()
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
+
         # New listing check
+        pp.pprint('Check 5 - First deposit for new listing', pp.Colors.INFO, pp.Heading.HEADING_2)
         Checks.NewListingCheck(customer, chain, proposal_address, missing_files).new_listing_check()
+        pp.pprint(pp.SEPARATOR_LINE, pp.Colors.INFO)
diff --git a/Quorum/checks/review_diff.py b/Quorum/checks/review_diff.py
index 34afc98..10a949a 100644
--- a/Quorum/checks/review_diff.py
+++ b/Quorum/checks/review_diff.py
@@ -10,6 +10,5 @@ def __init__(self, customer: str, chain: Chain, proposal_address: str, source_co
         self.target_repo = self.customer_folder / "review_module"
 
     def find_diffs(self) -> list[SourceCode]:
-        pp.pretty_print(f'Verifying missing files against {self.customer} review repo '
-                        f'(cloned under {self.target_repo})', pp.Colors.INFO)
-        return super().find_diffs()
\ No newline at end of file
+        pp.pprint(f'Review repo cloned under: {self.target_repo}', pp.Colors.INFO)
+        return super().find_diffs()
diff --git a/Quorum/entry_points/implementations/create_report.py b/Quorum/entry_points/implementations/create_report.py
index d116a72..5c7a102 100644
--- a/Quorum/entry_points/implementations/create_report.py
+++ b/Quorum/entry_points/implementations/create_report.py
@@ -3,7 +3,7 @@
 
 from jinja2 import Environment, FileSystemLoader
 
 import Quorum.auto_report.aave_tags as aave_tags
-import Quorum.utils.pretty_printer as pprinter
+import Quorum.utils.pretty_printer as pp
 
 
 def run_create_report(args: argparse.Namespace):
@@ -32,18 +32,18 @@ def run_create_report(args: argparse.Namespace):
         args.generate_report_path = Path(f'v3-{args.proposal_id}.md')
 
-    pprinter.pretty_print(f'Generating a report using template in {args.template}', pprinter.Colors.INFO)
+    pp.pprint(f'Generating a report using template in {args.template}', pp.Colors.INFO)
     env = Environment(loader=FileSystemLoader(args.template.parent))
     env.globals.update(zip=zip)
     template = env.get_template(args.template.name)
 
-    pprinter.pretty_print(f'Retrieving tag information for proposal {args.proposal_id}', pprinter.Colors.INFO)
+    pp.pprint(f'Retrieving tag information for proposal {args.proposal_id}', pp.Colors.INFO)
     tags = aave_tags.get_aave_tags(args.proposal_id)
-    pprinter.pretty_print(f'Tag information retrieved', pprinter.Colors.INFO)
+    pp.pprint(f'Tag information retrieved', pp.Colors.INFO)
 
     report = template.render(tags)
 
     with open(args.generate_report_path, 'w') as f:
         f.write(report)
 
-    pprinter.pretty_print(f'Created report at {args.generate_report_path}.', pprinter.Colors.SUCCESS)
+    pp.pprint(f'Created report at {args.generate_report_path}.', pp.Colors.SUCCESS)
diff --git a/Quorum/entry_points/implementations/ipfs_validator.py b/Quorum/entry_points/implementations/ipfs_validator.py
index d076e99..7a7d4b6 100644
--- a/Quorum/entry_points/implementations/ipfs_validator.py
+++ b/Quorum/entry_points/implementations/ipfs_validator.py
@@ -76,8 +76,8 @@ def run_ipfs_validator(args: argparse.Namespace):
     )
 
     if answer.incompatibilities:
-        pp.pretty_print("Found incompatibilities:", pp.Colors.FAILURE)
+        pp.pprint("Found incompatibilities:", pp.Colors.FAILURE)
         for incompatibility in answer.incompatibilities:
-            pp.pretty_print(incompatibility, pp.Colors.FAILURE)
+            pp.pprint(incompatibility, pp.Colors.FAILURE)
     else:
-        pp.pretty_print("LLM found no incompatibilities. Please Check manually.", pp.Colors.WARNING)
+        pp.pprint("LLM found no incompatibilities. Please Check manually.", pp.Colors.WARNING)
diff --git a/Quorum/entry_points/implementations/setup_quorum.py b/Quorum/entry_points/implementations/setup_quorum.py
index 340d75c..f9eef1d 100644
--- a/Quorum/entry_points/implementations/setup_quorum.py
+++ b/Quorum/entry_points/implementations/setup_quorum.py
@@ -27,11 +27,11 @@ def run_setup_quorum(args: argparse.Namespace):
         OSError: If there are filesystem permission issues
         shutil.Error: If file copy operations fail
     """
-    templates_dir = Path(__file__).parent.parent / 'templates'
+    templates_dir = Path(__file__).parent.parent.parent / 'templates'
     target_dir = args.working_dir.resolve()
 
     if not target_dir.exists():
-        pp.pretty_print(f"Creating directory: {target_dir}", pp.Colors.INFO)
+        pp.pprint(f"Creating directory: {target_dir}", pp.Colors.INFO)
         target_dir.mkdir(parents=True, exist_ok=True)
 
     # Collect all file names to copy from the templates directory
@@ -42,14 +42,14 @@ def run_setup_quorum(args: argparse.Namespace):
         dest = target_dir / '.env' if file_name == '.env.example' else target_dir / file_name
 
         if dest.exists():
-            pp.pretty_print(f"File exists: {dest}. Skipping.", pp.Colors.WARNING)
+            pp.pprint(f"File exists: {dest}. Skipping.", pp.Colors.WARNING)
             continue
 
         shutil.copy(src, dest)
-        pp.pretty_print(f"Copied {file_name} to {dest}", pp.Colors.SUCCESS)
+        pp.pprint(f"Copied {file_name} to {dest}", pp.Colors.SUCCESS)
 
     # Add export QUORUM_PATH="path_to_your_quorum_directory" to the new .env file
     with open(target_dir / '.env', 'a') as f:
         f.write(f'\nexport QUORUM_PATH="{target_dir}"\n')
 
-    pp.pretty_print("Quorum setup completed successfully!", pp.Colors.SUCCESS)
+    pp.pprint("Quorum setup completed successfully!", pp.Colors.SUCCESS)
diff --git a/Quorum/tests/test_checks.py b/Quorum/tests/test_checks.py
index 3db3976..fd55bd2 100644
--- a/Quorum/tests/test_checks.py
+++ b/Quorum/tests/test_checks.py
@@ -38,7 +38,7 @@ def test_global_variables(source_codes: list[SourceCode], tmp_output_path: Path)
 @pytest.mark.parametrize('source_codes', ['ETH/0xAD6c03BF78A3Ee799b86De5aCE32Bb116eD24637'], indirect=True)
 def test_price_feed_check(source_codes: list[SourceCode], tmp_output_path: Path):
     price_feed_check = Checks.PriceFeedCheck('Aave', Chain.ETH, '', source_codes, [
-        ChainLinkAPI()])
+        ChainLinkAPI()], [])
     price_feed_check.verify_price_feed()
 
     assert sorted([p.name for p in price_feed_check.check_folder.iterdir()]) == ['AaveV2Ethereum']
diff --git a/Quorum/tests/test_price_feed.py b/Quorum/tests/test_price_feed.py
index 7f2fe70..61da6c4 100644
--- a/Quorum/tests/test_price_feed.py
+++ b/Quorum/tests/test_price_feed.py
@@ -2,7 +2,7 @@
 
 from Quorum.apis.block_explorers.source_code import SourceCode
 
-import Quorum.checks as Checks
+from Quorum.checks.price_feed import PriceFeedCheck
 from Quorum.utils.chain_enum import Chain
 from Quorum.apis.price_feeds import ChainLinkAPI
 
@@ -11,8 +11,8 @@
 
 @pytest.mark.parametrize('source_codes', ['ETH/0xAD6c03BF78A3Ee799b86De5aCE32Bb116eD24637'], indirect=True)
 def test_price_feed(source_codes: list[SourceCode], tmp_output_path: Path):
-    price_feed_check = Checks.PriceFeedCheck('Aave', Chain.ETH, '', source_codes, [
-        ChainLinkAPI()])
+    price_feed_check = PriceFeedCheck('Aave', Chain.ETH, '', source_codes, [
+        ChainLinkAPI()], [])
     price_feed_check.verify_price_feed()
 
     assert sorted([p.name for p in price_feed_check.check_folder.iterdir()]) == ['AaveV2Ethereum']
@@ -53,7 +53,7 @@ def test_source_code_clean():
         }
     }
     """
-    cleaned = Checks.price_feed.remove_solidity_comments(code)
+    cleaned = PriceFeedCheck.remove_solidity_comments(code)
 
     expected = """pragma solidity ^0.8.0;
 
 import {IProposalGenericExecutor} from 'aave-helpers/interfaces/IProposalGenericExecutor.sol';
diff --git a/Quorum/utils/arg_validations.py b/Quorum/utils/arg_validations.py
index 23596af..012fb03 100644
--- a/Quorum/utils/arg_validations.py
+++ b/Quorum/utils/arg_validations.py
@@ -35,4 +35,4 @@ def load_config(config_path: str) -> dict[str, Any] | None:
             config_data = json.load(file)
             return config_data
     except (FileNotFoundError, JSONDecodeError) as e:
-        pp.pretty_print(f"Failed to parse given config file {config_path}:\n{e}", pp.Colors.FAILURE)
+        pp.pprint(f"Failed to parse given config file {config_path}:\n{e}", pp.Colors.FAILURE)
diff --git a/Quorum/utils/config.py b/Quorum/utils/config.py
index 62b1dac..1910ee5 100644
--- a/Quorum/utils/config.py
+++ b/Quorum/utils/config.py
@@ -17,12 +17,9 @@
 
 GROUND_TRUTH_PATH = MAIN_PATH / "ground_truth.json"
 
-if not GROUND_TRUTH_PATH.exists():
-    raise FileNotFoundError(f"Ground truth file not found at {GROUND_TRUTH_PATH}")
-
 ANTHROPIC_API_KEY = os.getenv('ANTHROPIC_API_KEY')
 if not ANTHROPIC_API_KEY:
-    pp.pretty_print(
+    pp.pprint(
         "Warning: ANTHROPIC_API_KEY environment variable is not set. All dependent checks will be skipped.",
         pp.Colors.WARNING
     )
diff --git a/Quorum/utils/config_loader.py b/Quorum/utils/config_loader.py
index 7b42e85..d19b6cc 100644
--- a/Quorum/utils/config_loader.py
+++ b/Quorum/utils/config_loader.py
@@ -4,10 +4,9 @@
 import Quorum.utils.pretty_printer as pp
 import Quorum.apis.price_feeds as price_feeds
 
+
 SUPPORTED_PROVIDERS = set(price_feeds.PriceFeedProvider.__members__.values())
 
-with open(config.GROUND_TRUTH_PATH) as f:
-    config_data = json.load(f)
 
 def load_customer_config(customer: str) -> Dict[str, any]:
     """
@@ -20,26 +19,35 @@ def load_customer_config(customer: str) -> Dict[str, any]:
     Returns:
         Dict[str, any]: The customer configuration data.
     """
+    if not config.GROUND_TRUTH_PATH.exists():
+        raise FileNotFoundError(f"Ground truth file not found at {config.GROUND_TRUTH_PATH}")
+
+    with open(config.GROUND_TRUTH_PATH) as f:
+        config_data = json.load(f)
+
     customer_config = config_data.get(customer)
     if not customer_config:
-        pp.pretty_print(f"Customer {customer} not found in ground truth data.", pp.Colors.FAILURE)
+        pp.pprint(f"Customer {customer} not found in ground truth data.", pp.Colors.FAILURE)
         raise ValueError(f"Customer {customer} not found in ground truth data.")
 
-    providers = customer_config.get("price_feed_providers", [])
-    providers += customer_config.get("token_validation_providers", [])
-    unsupported = set(providers) - SUPPORTED_PROVIDERS
+    price_feed_providers = customer_config.get("price_feed_providers", [])
+    token_providers = customer_config.get("token_validation_providers", [])
+    unsupported = set(price_feed_providers).union(token_providers) - SUPPORTED_PROVIDERS
     if unsupported:
-        pp.pretty_print(f"Unsupported providers for {customer}: {', '.join(unsupported)}", pp.Colors.FAILURE)
-        providers = list(set(providers) & SUPPORTED_PROVIDERS)
-        customer_config["price_feed_providers"] = providers
+        pp.pprint(f"Unsupported providers for {customer}: {', '.join(unsupported)}", pp.Colors.FAILURE)
+        price_feed_providers = list(set(price_feed_providers) & SUPPORTED_PROVIDERS)
+        token_providers = list(set(token_providers) & SUPPORTED_PROVIDERS)
 
     # Replace the provider names with the actual API objects
-    for i, provider in enumerate(providers):
+    for i, provider in enumerate(price_feed_providers):
         if provider == price_feeds.PriceFeedProvider.CHAINLINK:
-            providers[i] = price_feeds.ChainLinkAPI()
+            price_feed_providers[i] = price_feeds.ChainLinkAPI()
         elif provider == price_feeds.PriceFeedProvider.CHRONICLE:
-            providers[i] = price_feeds.ChronicleAPI()
-        elif provider == price_feeds.PriceFeedProvider.COINGECKO:
-            providers[i] = price_feeds.CoinGeckoAPI()
+            price_feed_providers[i] = price_feeds.ChronicleAPI()
+
+    for i, provider in enumerate(token_providers):
+        if provider == price_feeds.PriceFeedProvider.COINGECKO:
+            token_providers[i] = price_feeds.CoinGeckoAPI()
 
-    customer_config["price_feed_providers"] = providers
+    customer_config["price_feed_providers"] = price_feed_providers
+    customer_config["token_validation_providers"] = token_providers
 
     return customer_config
diff --git a/Quorum/utils/load_env.py b/Quorum/utils/load_env.py
index dc6e1e8..8678f67 100644
--- a/Quorum/utils/load_env.py
+++ b/Quorum/utils/load_env.py
@@ -23,6 +23,6 @@ def load_env_variables():
 
     # Print the user if any environment variables were overridden
     if overridden_vars:
-        pp.pretty_print("The following environment variables were overridden:", pp.Colors.WARNING)
+        pp.pprint("The following environment variables were overridden:", pp.Colors.WARNING)
         for key, values in overridden_vars.items():
-            pp.pretty_print(f"{key}: {values['before']} -> {values['after']}", pp.Colors.WARNING)
+            pp.pprint(f"{key}: {values['before']} -> {values['after']}", pp.Colors.WARNING)
diff --git a/Quorum/utils/pretty_printer.py b/Quorum/utils/pretty_printer.py
index 16d3d4e..0d6cb84 100644
--- a/Quorum/utils/pretty_printer.py
+++ b/Quorum/utils/pretty_printer.py
@@ -1,4 +1,15 @@
 from enum import StrEnum
+from typing import Optional
+
+
+SEPARATOR_LINE = '\n' + '-' * 110 + '\n'
+
+
+class Heading(StrEnum):
+    HEADING_1 = '='
+    HEADING_2 = '-'
+    HEADING_3 = '.'
+
 
 class Colors(StrEnum):
     SUCCESS = '\033[92m'
@@ -7,8 +18,9 @@ class Colors(StrEnum):
     INFO = ''
     RESET = '\033[0m'
 
-def pretty_print(message: str, status: Colors):
-    separator_line = status + '-' * 80 + Colors.RESET
-    print(separator_line)
-    print(status + message + Colors.RESET)
-    print(separator_line)
\ No newline at end of file
+
+def pprint(message: str, status: Colors, heading: Optional[Heading]=None):
+    s = status + message + Colors.RESET
+    if heading:
+        s += '\n' + heading * len(message) + '\n'
+    print(s)
diff --git a/version b/version
index 121eb39..f796d62 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-20250106.191119.109042
+20250107.132006.402947