From 7458d67f0f8ff10bee115de02d280b509cd0c7cc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?=
Date: Fri, 21 Jun 2024 22:57:11 +0200
Subject: [PATCH 01/10] Rewrite docstrings in Google style (#4450)

Closes #4435

Blocked on:
* #4449

Changes:
- Adapt the Google style of docstrings to `pyk`.
- Add `pydocstyle` to checks. Ensure that the codebase passes the check.

Google style docstrings can be rendered by the Napoleon plugin for Sphinx:
https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html.

Example Google style docstrings:
https://www.sphinx-doc.org/en/master/usage/extensions/example_google.html
---
 pyk/Makefile                                 |   5 +-
 pyk/poetry.lock                              |  30 +++-
 pyk/pyproject.toml                           |   6 +
 pyk/src/pyk/__main__.py                      |   6 +-
 pyk/src/pyk/coverage.py                      |  38 +++--
 pyk/src/pyk/cterm/cterm.py                   |  98 +++++++-----
 pyk/src/pyk/kast/formatter.py                |  10 +-
 pyk/src/pyk/kast/inner.py                    | 119 +++++++++------
 pyk/src/pyk/kast/lexer.py                    |  10 +-
 pyk/src/pyk/kast/manip.py                    | 148 ++++++++++++-------
 pyk/src/pyk/kast/outer.py                    |  25 ++--
 pyk/src/pyk/kast/outer_lexer.py              |  33 +++--
 pyk/src/pyk/kast/pretty.py                   |  22 ++-
 pyk/src/pyk/kcfg/kcfg.py                     |  74 ++++------
 pyk/src/pyk/kcfg/show.py                     |   1 -
 pyk/src/pyk/kllvm/hints/prooftrace.py        | 147 +++++++++---------
 pyk/src/pyk/konvert/_module_to_kore.py       |   4 +-
 pyk/src/pyk/kore/rpc.py                      |   2 +-
 pyk/src/pyk/kore/syntax.py                   |   6 +-
 pyk/src/pyk/kore_exec_covr/__main__.py       |  16 +-
 pyk/src/pyk/kore_exec_covr/kore_exec_covr.py |  23 +--
 pyk/src/pyk/krepl/repl.py                    |  11 +-
 pyk/src/pyk/ktool/kprove.py                  |  24 +--
 pyk/src/pyk/ktool/krun.py                    |  19 ++-
 pyk/src/pyk/proof/proof.py                   | 109 ++++++++------
 pyk/src/pyk/proof/reachability.py            |   5 +-
 26 files changed, 563 insertions(+), 428 deletions(-)

diff --git a/pyk/Makefile b/pyk/Makefile
index 7e13281f315..f41d7c33b83 100644
--- a/pyk/Makefile
+++ b/pyk/Makefile
@@ -67,7 +67,7 @@ profile: poetry-install
 # Checks and formatting

 format: autoflake isort black
-check: check-flake8 check-mypy check-autoflake check-isort check-black
+check: check-flake8 check-mypy check-autoflake check-isort check-pydocstyle check-black

 check-flake8: poetry-install
 	$(POETRY_RUN) flake8 src
@@ -87,6 +87,9 @@ isort: poetry-install
 check-isort: poetry-install
 	$(POETRY_RUN) isort --check src

+check-pydocstyle: poetry-install
+	$(POETRY_RUN) pydocstyle src
+
 black: poetry-install
 	$(POETRY_RUN) black src

diff --git a/pyk/poetry.lock b/pyk/poetry.lock
index c0e8e1a3092..d181bc4df4a 100644
--- a/pyk/poetry.lock
+++ b/pyk/poetry.lock
@@ -680,6 +680,23 @@ files = [
     {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
 ]

+[[package]]
+name = "pydocstyle"
+version = "6.3.0"
+description = "Python docstring style checker"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
+    {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
+]
+
+[package.dependencies]
+snowballstemmer = ">=2.2.0"
+
+[package.extras]
+toml = ["tomli (>=1.2.3)"]
+
 [[package]]
 name = "pyflakes"
 version = "3.2.0"
@@ -864,6 +881,17 @@ files = [
 docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
 testing = ["build[virtualenv] 
(>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + [[package]] name = "textual" version = "0.27.0" @@ -982,4 +1010,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "887f594c54788d5dc671ec50bf1bbd5430d8d126e270ef6dc373f93a96bc8a5e" +content-hash = "1e8db8aceb9488a870758157f280a7cbff78677f44d6bedfa585f6bebdd5ffc5" diff --git a/pyk/pyproject.toml b/pyk/pyproject.toml index 400e0e2e674..5497ac8620f 100644 --- a/pyk/pyproject.toml +++ b/pyk/pyproject.toml @@ -39,6 +39,7 @@ flake8-type-checking = "*" isort = "*" mypy = "*" pep8-naming = "*" +pydocstyle = "*" pytest = "*" pytest-cov = "*" pytest-mock = "*" @@ -58,6 +59,11 @@ kore-exec-covr = "pyk.kore_exec_covr.__main__:main" [tool.poetry.plugins.pytest11] pytest-pyk = "pyk.testing.plugin" +[tool.pydocstyle] +convention = "google" +add-select = "D204,D401,D404" +add-ignore = "D1" + [tool.isort] profile = "black" line_length = 120 diff --git a/pyk/src/pyk/__main__.py b/pyk/src/pyk/__main__.py index 17d7cfc441a..a7b8d6143ea 100644 --- a/pyk/src/pyk/__main__.py +++ b/pyk/src/pyk/__main__.py @@ -218,9 +218,9 @@ def pretty_print_execute_response(execute_result: ExecuteResult) -> list[str]: def exec_rpc_kast(options: RPCKastOptions) -> None: - """ - Convert an 'execute' JSON RPC response to a new 'execute' or 'simplify' request, - copying parameters from a reference request. + """Convert an 'execute' JSON RPC response to a new 'execute' or 'simplify' request. + + Copies parameters from a reference request. """ reference_request = json.loads(options.reference_request_file.read()) input_dict = json.loads(options.response_file.read()) diff --git a/pyk/src/pyk/coverage.py b/pyk/src/pyk/coverage.py index 5b18d4173c5..f57498cbe52 100644 --- a/pyk/src/pyk/coverage.py +++ b/pyk/src/pyk/coverage.py @@ -16,14 +16,13 @@ def get_rule_by_id(definition: KDefinition, rule_id: str) -> KRule: """Get a rule from the definition by coverage rule id. - Input: + Args: + definition: JSON-encoded definition. + rule_id: String of unique rule identifier generated by `kompile --coverage`. - - definition: json encoded definition. - - rule_id: string of unique rule identifier generated by `kompile --coverage`. - - Output: json encoded rule which has identifier rule_id. + Returns: + JSON encoded rule which has identifier `rule_id`. 
""" - for module in definition.modules: for sentence in module.sentences: if type(sentence) is KRule: @@ -52,16 +51,15 @@ def translate_coverage( ) -> list[str]: """Translate the coverage data from one kompiled definition to another. - Input: - - - src_all_rules: contents of allRules.txt for definition which coverage was generated for. - - dst_all_rules: contents of allRules.txt for definition which you desire coverage for. - - dst_definition: JSON encoded definition of dst kompiled definition. - - src_rules_list: Actual coverage data produced. + Args: + src_all_rules: Contents of allRules.txt for definition which coverage was generated for. + dst_all_rules: Contents of allRules.txt for definition which you desire coverage for. + dst_definition: JSON encoded definition of dst kompiled definition. + src_rules_list: Actual coverage data produced. - Output: list of non-functional rules applied in dst definition translated from src definition. + Returns: + List of non-functional rules applied in dst definition translated from src definition. """ - # Load the src_rule_id -> src_source_location rule map from the src kompiled directory src_rule_map = {} for line in src_all_rules: @@ -110,13 +108,13 @@ def translate_coverage( def translate_coverage_from_paths(src_kompiled_dir: str, dst_kompiled_dir: str, src_rules_file: PathLike) -> list[str]: """Translate coverage information given paths to needed files. - Input: - - - src_kompiled_dir: Path to kompiled directory of source. - - dst_kompiled_dir: Path to kompiled directory of destination. - - src_rules_file: Path to generated rules coverage file. + Args: + src_kompiled_dir: Path to kompiled directory of source. + dst_kompiled_dir: Path to kompiled directory of destination. + src_rules_file: Path to generated rules coverage file. - Output: Translated list of rules with non-semantic rules stripped out. + Returns: + Translated list of rules with non-semantic rules stripped out. """ src_all_rules = [] with open(src_kompiled_dir + '/allRules.txt') as src_all_rules_file: diff --git a/pyk/src/pyk/cterm/cterm.py b/pyk/src/pyk/cterm/cterm.py index d16c710ccab..7fc790eced0 100644 --- a/pyk/src/pyk/cterm/cterm.py +++ b/pyk/src/pyk/cterm/cterm.py @@ -34,8 +34,7 @@ @dataclass(frozen=True, order=True) class CTerm: - """ - Represent a symbolic program state, obtained and manipulated using symbolic execution. + """Represent a symbolic program state, obtained and manipulated using symbolic execution. Contains the data: - `config`: the _configuration_ (structural component of the state, potentially containing free variabls) @@ -126,7 +125,7 @@ def kast(self) -> KInner: @cached_property def free_vars(self) -> frozenset[str]: - """Return the set of free variable names contained in this `CTerm`""" + """Return the set of free variable names contained in this `CTerm`.""" return frozenset(free_vars(self.kast)) @property @@ -188,16 +187,19 @@ def add_constraint(self, new_constraint: KInner) -> CTerm: def anti_unify( self, other: CTerm, keep_values: bool = False, kdef: KDefinition | None = None ) -> tuple[CTerm, CSubst, CSubst]: - """Given two `CTerm`, find a more general `CTerm` which can instantiate to both. + """Given two `CTerm` instances, find a more general `CTerm` which can instantiate to both. + + Args: + other: other `CTerm` to consider for finding a more general `CTerm` with this one. + keep_values: do not discard information about abstracted variables in returned result. + kdef (optional): `KDefinition` to make analysis more precise. 
- :param other: other `CTerm` to consider for finding a more general `CTerm` with this one. - :param keep_values: do not discard information about abstracted variables in returned result. - :param kdef: optional `KDefinition` to make analysis more precise. + Returns: + A tuple ``(cterm, csubst1, csubst2)`` where - :return: tuple `cterm: CTerm, csubst1: CSubst1, csubst2: CSubst2` such that: - - `cterm`: more general `CTerm` than either `self` or `other` - - `csubst1`: constrained substitution to apply to `cterm` to obtain `self` - - `csubst2`: constrained substitution to apply to `cterm` to obtain `other` + - ``cterm``: More general `CTerm` than either `self` or `other`. + - ``csubst1``: Constrained substitution to apply to `cterm` to obtain `self`. + - ``csubst2``: Constrained substitution to apply to `cterm` to obtain `other`. """ new_config, self_subst, other_subst = anti_unify(self.config, other.config, kdef=kdef) common_constraints = [constraint for constraint in self.constraints if constraint in other.constraints] @@ -240,8 +242,11 @@ def anti_unify( def remove_useless_constraints(self, keep_vars: Iterable[str] = ()) -> CTerm: """Return a new `CTerm` with constraints over unbound variables removed. - :param keep_vars: List of variables to keep constraints for even if unbound in the `CTerm`. - :return: A `CTerm` with the constraints over unbound variables removed. + Args: + keep_vars: List of variables to keep constraints for even if unbound in the `CTerm`. + + Returns: + A `CTerm` with the constraints over unbound variables removed. """ initial_vars = free_vars(self.config) | set(keep_vars) new_constraints = remove_useless_constraints(self.constraints, initial_vars) @@ -251,15 +256,20 @@ def remove_useless_constraints(self, keep_vars: Iterable[str] = ()) -> CTerm: def anti_unify(state1: KInner, state2: KInner, kdef: KDefinition | None = None) -> tuple[KInner, Subst, Subst]: """Return a generalized state over the two input states. - :param state1: State to generalize over, represented as bare `KInner`. - **Assumption** is that this is a bare configuration with no constraints attached. - :param state2: State to generalize over, represented as bare `KInner`. - **Assumption** is that this is a bare configuration with no constraints attached. - :param kdef: Optional `KDefinition` to make the analysis more precise. - :return: tuple `state: KInner, subst1: Subst, subst2: Subst` such that: - - `state`: a symbolic state represented as `KInner` which is more general than `state1` or `state2`. - - `subst1`: a `Subst` which when applied to `state` recovers `state1`. - - `subst2`: a `Subst` which when applied to `state` recovers `state2`. + Args: + state1: State to generalize over, represented as bare `KInner`. + state2: State to generalize over, represented as bare `KInner`. + kdef (optional): `KDefinition` to make the analysis more precise. + + Note: + Both `state1` and `state2` are expected to be bare configurations with no constraints attached. + + Returns: + A tuple ``(state, subst1, subst2)`` such that + + - ``state``: A symbolic state represented as `KInner` which is more general than `state1` or `state2`. + - ``subst1``: A `Subst` which, when applied to `state`, recovers `state1`. + - ``subst2``: A `Subst` which, when applied to `state`, recovers `state2`. 
""" def _rewrites_to_abstractions(_kast: KInner) -> KInner: @@ -279,8 +289,7 @@ def _rewrites_to_abstractions(_kast: KInner) -> KInner: @dataclass(frozen=True, order=True) class CSubst: - """ - Store information about instantiation of a symbolic state (`CTerm`) to a more specific one. + """Store information about instantiation of a symbolic state (`CTerm`) to a more specific one. Contains the data: - `subst`: assignment to apply to free variables in the state to achieve more specific one @@ -333,13 +342,19 @@ def cterm_build_claim( ) -> tuple[KClaim, Subst]: """Return a `KClaim` between the supplied initial and final states. - :param claim_id: Label to give the claim. - :param init_cterm: State to put on LHS of the rule (constraints interpreted as `requires` clause). - :param final_cterm: State to put on RHS of the rule (constraints interpreted as `ensures` clause). - :param keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. - :return: tuple `claim: KClaim, var_map: Subst`: - - `claim`: A `KClaim` with variable naming conventions applied so that it should be parseable by K frontend. - - `var_map`: The variable renamings that happened to make the claim parseable by K frontend (which can be undone to recover original variables). + Args: + claim_id: Label to give the claim. + init_cterm: State to put on LHS of the rule (constraints interpreted as `requires` clause). + final_cterm: State to put on RHS of the rule (constraints interpreted as `ensures` clause). + keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. + + Returns: + A tuple ``(claim, var_map)`` where + + - ``claim``: A `KClaim` with variable naming conventions applied + so that it should be parseable by the K Frontend. + - ``var_map``: The variable renamings applied to make the claim parseable by the K Frontend + (which can be undone to recover original variables). """ init_config, *init_constraints = init_cterm final_config, *final_constraints = final_cterm @@ -355,14 +370,19 @@ def cterm_build_rule( ) -> tuple[KRule, Subst]: """Return a `KRule` between the supplied initial and final states. - :param rule_id: Label to give the rule. - :param init_cterm: State to put on LHS of the rule (constraints interpreted as `requires` clause). - :param final_cterm: State to put on RHS of the rule (constraints interpreted as `ensures` clause). - :param priority: Rule priority to give to the generated `KRule`. - :param keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. - :return: tuple `claim: KRule, var_map: Subst` such that: - - `rule`: A `KRule` with variable naming conventions applied so that it should be parseable by K frontend. - - `var_map`: The variable renamings that happened to make the claim parseable by K frontend (which can be undone to recover original variables). + Args: + rule_id: Label to give the rule. + init_cterm: State to put on LHS of the rule (constraints interpreted as `requires` clause). + final_cterm: State to put on RHS of the rule (constraints interpreted as `ensures` clause). + keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. + + Returns: + A tuple ``(rule, var_map)`` where + + - ``rule``: A `KRule` with variable naming conventions applied + so that it should be parseable by the K Frontend. + - ``var_map``: The variable renamings applied to make the rule parseable by the K Frontend + (which can be undone to recover original variables). 
""" init_config, *init_constraints = init_cterm final_config, *final_constraints = final_cterm diff --git a/pyk/src/pyk/kast/formatter.py b/pyk/src/pyk/kast/formatter.py index 64cb4636b2b..10d4b4f5c71 100644 --- a/pyk/src/pyk/kast/formatter.py +++ b/pyk/src/pyk/kast/formatter.py @@ -173,7 +173,10 @@ def _between_terminals(definition: KDefinition, parent: KApply, index: int) -> b def _associativity_wrong(definition: KDefinition, parent: KApply, term: KApply, index: int) -> bool: - """A left (right) associative symbol cannot appear as the rightmost (leftmost) child of a symbol with equal priority.""" + """Return whether `term` can appear as the `index`-th child of `parent` according to associativity rules. + + A left (right) associative symbol cannot appear as the rightmost (leftmost) child of a symbol with equal priority. + """ parent_label = parent.label.name term_label = term.label.name prod = definition.symbols[parent_label] @@ -185,7 +188,10 @@ def _associativity_wrong(definition: KDefinition, parent: KApply, term: KApply, def _priority_wrong(definition: KDefinition, parent: KApply, term: KApply) -> bool: - """A symbol with a lesser priority cannot appear as the child of a symbol with greater priority.""" + """Return whether `term` can appear as a child of `parent` according to priority rules. + + A symbol with a lesser priority cannot appear as the child of a symbol with greater priority. + """ parent_label = parent.label.name term_label = term.label.name return term_label in definition.priorities.get(parent_label, ()) diff --git a/pyk/src/pyk/kast/inner.py b/pyk/src/pyk/kast/inner.py index 0af7ab12ba9..b27a0d3153e 100644 --- a/pyk/src/pyk/kast/inner.py +++ b/pyk/src/pyk/kast/inner.py @@ -198,11 +198,13 @@ def map_inner(self: KI, f: Callable[[KInner], KInner]) -> KI: @abstractmethod def match(self, term: KInner) -> Subst | None: - """ - Perform syntactic pattern matching and return the substitution. + """Perform syntactic pattern matching and return the substitution. + + Args: + term: Term to match. - :param term: Term to match. - :return: Substitution instantiating self to the term. + Returns: + A substitution instantiating `self` to `term` if one exists, ``None`` otherwise. """ ... @@ -530,11 +532,13 @@ def match(self, term: KInner) -> Subst | None: return None def apply_top(self, term: KInner) -> KInner: - """ - Rewrite a given term at the top + """Rewrite a given term at the top. - :param term: Term to rewrite. - :return: The term with the rewrite applied once at the top. + Args: + term: Term to rewrite. + + Returns: + The term with the rewrite applied once at the top. """ subst = self.lhs.match(term) if subst is not None: @@ -542,11 +546,13 @@ def apply_top(self, term: KInner) -> KInner: return term def apply(self, term: KInner) -> KInner: - """ - Attempt rewriting once at every position in a term bottom-up. + """Attempt rewriting once at every position in a term bottom-up. - :param term: Term to rewrite. - :return: The term with rewrites applied at every node once starting from the bottom. + Args: + term: Term to rewrite. + + Returns: + The term with rewrites applied at every node once starting from the bottom. """ return bottom_up(self.apply_top, term) @@ -776,12 +782,14 @@ def is_identity(self) -> bool: def bottom_up_with_summary(f: Callable[[KInner, list[A]], tuple[KInner, A]], kinner: KInner) -> tuple[KInner, A]: - """ - Traverse a term from the bottom moving upward, potentially both transforming it and collecting information about it. 
+    """Traverse a term from the bottom moving upward, potentially both transforming it and collecting information about it.
+
+    Args:
+        f: Function to apply at each AST node to transform it and collect summary.
+        kinner: Term to apply this transformation to.

-    :param f: Function to apply at each AST node to transform it and collect summary.
-    :param kinner: KInner to apply this transformation to.
-    :return: A tuple of the transformed term and the summarized results.
+    Returns:
+        A tuple of the transformed term and the summarized results.
     """
     stack: list = [kinner, [], []]
     while True:
@@ -806,12 +814,14 @@ def bottom_up_with_summary(f: Callable[[KInner, list[A]], tuple[KInner, A]], kin

 # TODO make method of KInner
 def bottom_up(f: Callable[[KInner], KInner], kinner: KInner) -> KInner:
-    """
-    Traverse a term from the bottom moving upward, updating it using a given transformation.
+    """Transform a term from the bottom moving upward.
+
+    Args:
+        f: Function to apply to each node in the term.
+        kinner: Original term to transform.

-    :param f: Transformation to apply at each node in the term.
-    :param kinner: Original term to transform.
-    :return: The transformed term.
+    Returns:
+        The transformed term.
     """
     stack: list = [kinner, []]
     while True:
@@ -832,12 +842,14 @@ def bottom_up(f: Callable[[KInner], KInner], kinner: KInner) -> KInner:

 # TODO make method of KInner
 def top_down(f: Callable[[KInner], KInner], kinner: KInner) -> KInner:
-    """
-    Traverse a term from the top moving downward, updating it using a given transformation.
+    """Transform a term from the top moving downward.
+
+    Args:
+        f: Function to apply to each node in the term.
+        kinner: Original term to transform.

-    :param f: Transformation to apply at each node in the term.
-    :param kinner: Original term to transform.
-    :return: The transformed term.
+    Returns:
+        The transformed term.
     """
     stack: list = [f(kinner), []]
     while True:
@@ -858,11 +870,13 @@ def top_down(f: Callable[[KInner], KInner], kinner: KInner) -> KInner:

 # TODO: make method of KInner
 def var_occurrences(term: KInner) -> dict[str, list[KVariable]]:
-    """
-    Collect the list of occurrences of each variable in a given term.
+    """Collect the list of occurrences of each variable in a given term.
+
+    Args:
+        term: Term to collect variables from.

-    :param term: Term to collect variables from.
-    :return: Dictionary with keys a variable names and value as list of all occurrences of that variable.
+    Returns:
+        A dictionary with variable names as keys and the list of all occurrences of the variable as values.
     """
     _var_occurrences: dict[str, list[KVariable]] = {}

@@ -879,10 +893,11 @@ def _var_occurence(_term: KInner) -> None:

 # TODO replace by method that does not reconstruct the AST
 def collect(callback: Callable[[KInner], None], kinner: KInner) -> None:
-    """
-    Collect information about a given term when traversing it bottom up using a side-effect function.
+    """Collect information about a given term traversing it bottom-up using a function with side effects.

-    :param callback: Function supplied by user which has side-effect of collecting desired information at each AST node.
+    Args:
+        callback: Function with the side effect of collecting desired information at each AST node.
+        kinner: The term to traverse.
     """

     def f(kinner: KInner) -> KInner:
@@ -893,13 +908,15 @@ def f(kinner: KInner) -> KInner:


 def build_assoc(unit: KInner, label: str | KLabel, terms: Iterable[KInner]) -> KInner:
-    """
-    Build an associative list.
+    """Build an associative list.
+
+    Args:
+        unit: The empty variant of the given list type. 
+ label: The associative list join operator. + terms: List (potentially empty) of terms to join in an associative list. - :param unit: The empty variant of the given list type. - :param label: The associative list join operator. - :param terms: List (potentially empty) of terms to join in an associative list. - :return: The list of terms joined using the supplied label, or the unit element in the case of no terms. + Returns: + The list of terms joined using the supplied label, or the unit element in the case of no terms. """ _label = label if type(label) is KLabel else KLabel(label) res: KInner | None = None @@ -914,13 +931,15 @@ def build_assoc(unit: KInner, label: str | KLabel, terms: Iterable[KInner]) -> K def build_cons(unit: KInner, label: str | KLabel, terms: Iterable[KInner]) -> KInner: - """ - Build a cons list. + """Build a cons list. - :param unit: The empty variant of the given list type. - :param label: The associative list join operator. - :param terms: List (potentially empty) of terms to join in an associative list. - :return: The list of terms joined using the supplied label, terminated with the unit element. + Args: + unit: The empty variant of the given list type. + label: The associative list join operator. + terms: List (potentially empty) of terms to join in an associative list. + + Returns: + The list of terms joined using the supplied label, terminated with the unit element. """ it = iter(terms) try: @@ -933,8 +952,12 @@ def build_cons(unit: KInner, label: str | KLabel, terms: Iterable[KInner]) -> KI def flatten_label(label: str, kast: KInner) -> list[KInner]: """Given a cons list, return a flat Python list of the elements. - - Input: Cons operation to flatten. - - Output: Items of cons list. + Args: + label: The cons operator. + kast: The cons list to flatten. + + Returns: + Items of cons list. """ flattened_args = [] rest_of_args = [kast] # Rest of arguments in reversed order diff --git a/pyk/src/pyk/kast/lexer.py b/pyk/src/pyk/kast/lexer.py index 91690672bf2..caa814ac127 100644 --- a/pyk/src/pyk/kast/lexer.py +++ b/pyk/src/pyk/kast/lexer.py @@ -145,7 +145,10 @@ def _kseq(la: str, it: Iterator[str]) -> tuple[Token, str]: def _id_or_token(la: str, it: Iterator[str]) -> tuple[Token, str]: - """[#a-z](a-zA-Z0-9)*""" + """Match an ID or token. + + Corresponds to regex: [#a-z](a-zA-Z0-9)* + """ assert la == '#' or la in _LOWER buf = [la] la = next(it, '') @@ -162,7 +165,10 @@ def _id_or_token(la: str, it: Iterator[str]) -> tuple[Token, str]: def _variable(la: str, it: Iterator[str]) -> tuple[Token, str]: - r"""_ | \?_ | \??_?[A-Z][a-zA-Z0-9'_]*""" + r"""Match a variable. + + Corresponds to regex: _ | \?_ | \??_?[A-Z][a-zA-Z0-9'_]* + """ assert la == '?' or la == '_' or la in _UPPER # States: diff --git a/pyk/src/pyk/kast/manip.py b/pyk/src/pyk/kast/manip.py index 0d3ec94e165..fc453654d33 100644 --- a/pyk/src/pyk/kast/manip.py +++ b/pyk/src/pyk/kast/manip.py @@ -336,8 +336,11 @@ def _replace_with_var(k: KInner) -> KInner: def collapse_dots(kast: KInner) -> KInner: """Given a configuration with structural frames `...`, minimize the structural frames needed. - - Input: a configuration, potentially with structural frames. - - Output: the same configuration, with the amount of structural framing minimized. + Args: + kast: A configuration, potentially with structural frames. + + Returns: + The same configuration, with the amount of structural framing minimized. 
""" def _collapse_dots(_kast: KInner) -> KInner: @@ -397,10 +400,13 @@ def _push_down_rewrites(_kast: KInner) -> KInner: def inline_cell_maps(kast: KInner) -> KInner: - """Ensure that cell map collections are printed nicely, not as Maps." + """Ensure that cell map collections are printed nicely, not as Maps. - - Input: kast term. - - Output: kast term with cell maps inlined. + Args: + kast: A KAST term. + + Returns: + The KAST term with cell maps inlined. """ def _inline_cell_maps(_kast: KInner) -> KInner: @@ -416,8 +422,11 @@ def _inline_cell_maps(_kast: KInner) -> KInner: def remove_semantic_casts(kast: KInner) -> KInner: """Remove injected `#SemanticCast*` nodes in AST. - - Input: kast (possibly) containing automatically injected `#SemanticCast*` KApply nodes. - - Output: kast without the `#SemanticCast*` nodes. + Args: + kast: A term (possibly) containing automatically injected `#SemanticCast*` KApply nodes. + + Returns: + The term without the `#SemanticCast*` nodes. """ def _remove_semtnaic_casts(_kast: KInner) -> KInner: @@ -431,8 +440,12 @@ def _remove_semtnaic_casts(_kast: KInner) -> KInner: def useless_vars_to_dots(kast: KInner, keep_vars: Iterable[str] = ()) -> KInner: """Structurally abstract away useless variables. - - Input: kast term, and a requires clause and ensures clause. - - Output: kast term with the useless vars structurally abstracted. + Args: + kast: A term. + keep_vars: Iterable of variables to keep. + + Returns: + The term with the useless varables structurally abstracted. """ num_occs = count_vars(kast) + Counter(keep_vars) @@ -453,8 +466,12 @@ def _collapse_useless_vars(_kast: KInner) -> KInner: def labels_to_dots(kast: KInner, labels: Collection[str]) -> KInner: """Abstract specific labels for printing. - - Input: kast term, and list of labels to abstract. - - Output: kast term with those labels abstracted. + Args: + kast: A term. + labels: List of labels to abstract. + + Returns + The term with `labels` abstracted. """ def _labels_to_dots(k: KInner) -> KInner: @@ -500,13 +517,16 @@ def minimize_term( ) -> KInner: """Minimize a K term for pretty-printing. - - Input: kast term, and optionally requires and ensures clauses with constraints. - - Output: kast term minimized. - - Variables only used once will be removed. - - Unused cells will be abstracted. - - Attempt to remove useless conditions. - """ + - Variables only used once will be removed. + - Unused cells will be abstracted. + - Useless conditions will be attempted to be removed. + + Args: + kast: A term. + Returns: + The term, minimized. + """ term = inline_cell_maps(term) term = remove_semantic_casts(term) term = useless_vars_to_dots(term, keep_vars=keep_vars) @@ -524,11 +544,15 @@ def minimize_term( def minimize_rule(rule: RL, keep_vars: Iterable[str] = ()) -> RL: """Minimize a K rule or claim for pretty-printing. - - Input: kast representing a K rule or claim. - - Output: kast with the rule or claim minimized: - - Variables only used once will be removed. - - Unused cells will be abstracted. - - Attempt to remove useless side-conditions. + - Variables only used once will be removed. + - Unused cells will be abstracted. + - Useless side-conditions will be attempted to be removed. + + Args: + rule: A K rule or claim. + + Returns: + The rule or claim, minimized. """ body = rule.body requires = rule.requires @@ -562,8 +586,11 @@ def remove_attr(term: KInner) -> KInner: def remove_generated_cells(term: KInner) -> KInner: """Remove and from a configuration. - - Input: Constrained term. 
-    - Output: Constrained term with those cells removed.
+    Args:
+        term: A term.
+
+    Returns:
+        The term with those cells removed.
     """
     rewrite = KRewrite(KApply('<generatedTop>', [KVariable('CONFIG'), KVariable('_')]), KVariable('CONFIG'))
     return rewrite(term)
@@ -661,11 +688,15 @@ def normalize_constraints(constraints: Iterable[KInner]) -> tuple[KInner, ...]:


 def remove_useless_constraints(constraints: Iterable[KInner], initial_vars: Iterable[str]) -> list[KInner]:
-    """Given a list of constraints and a list of variables, return an updated list with only constraints that depend on these variables (directly or indirectly).
+    """Remove constraints that do not depend on a given iterable of variables (directly or indirectly).

-    :param constraints: Original list of constraints to remove from.
-    :param initial_vars: Initial list of variables to keep constraints for.
-    :return: A list of constraints with only those constraints that contain the initial variables or variables that depend on those through other constraints in the list.
+    Args:
+        constraints: Iterable of constraints to filter.
+        initial_vars: Initial iterable of variables to keep constraints for.
+
+    Returns:
+        A list of constraints with only those constraints that contain the initial variables,
+        or variables that depend on those through other constraints in the list.
     """
     used_vars = list(initial_vars)
     prev_len_used_vars = 0
@@ -692,15 +723,21 @@ def build_claim(
 ) -> tuple[KClaim, Subst]:
     """Return a `KClaim` between the supplied initial and final states.

-    :param claim_id: Label to give the claim.
-    :param init_config: State to put on LHS of the rule.
-    :param final_config: State to put on RHS of the rule.
-    :param init_constraints: Constraints to use as `requires` clause.
-    :param final_constraints: Constraints to use as `ensures` clause.
-    :param keep_vars: Variables to leave in the side-conditions even if not bound in the configuration.
-    :return: tuple `claim: KClaim, var_map: Subst`:
-    - `claim`: A `KClaim` with variable naming conventions applied so that it should be parseable by K frontend.
-    - `var_map`: The variable renamings that happened to make the claim parseable by K frontend (which can be undone to recover original variables).
+    Args:
+        claim_id: Label to give the claim.
+        init_config: State to put on LHS of the rule.
+        final_config: State to put on RHS of the rule.
+        init_constraints: Constraints to use as `requires` clause.
+        final_constraints: Constraints to use as `ensures` clause.
+        keep_vars: Variables to leave in the side-conditions even if not bound in the configuration.
+
+    Returns:
+        A tuple ``(claim, var_map)`` where
+
+        - ``claim``: A `KClaim` with variable naming conventions applied
+          so that it should be parseable by the K Frontend.
+        - ``var_map``: The variable renamings applied to make the claim parseable by the K Frontend
+          (which can be undone to recover the original variables).
     """
     rule, var_map = build_rule(
         claim_id, init_config, final_config, init_constraints, final_constraints, keep_vars=keep_vars
@@ -720,16 +757,21 @@ def build_rule(
 ) -> tuple[KRule, Subst]:
     """Return a `KRule` between the supplied initial and final states.

-    :param rule_id: Label to give the rule.
-    :param init_config: State to put on LHS of the rule.
-    :param final_config: State to put on RHS of the rule.
-    :param init_constraints: Constraints to use as `requires` clause.
-    :param final_constraints: Constraints to use as `ensures` clause.
-    :param priority: Rule priority to give to the generated `KRule`. 
- :param keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. - :return: tuple `claim: KRule, var_map: Subst` such that: - - `rule`: A `KRule` with variable naming conventions applied so that it should be parseable by K frontend. - - `var_map`: The variable renamings that happened to make the claim parseable by K frontend (which can be undone to recover original variables). + Args: + rule_id: Label to give the rule. + init_config: State to put on LHS of the rule. + final_config: State to put on RHS of the rule. + init_constraints: Constraints to use as `requires` clause. + final_constraints: Constraints to use as `ensures` clause. + keep_vars: Variables to leave in the side-conditions even if not bound in the configuration. + + Returns: + A tuple ``(rule, var_map)`` where + + - ``rule``: A `KRule` with variable naming conventions applied + so that it should be parseable by the K Frontend. + - ``var_map``: The variable renamings applied to make the rule parseable by the K Frontend + (which can be undone to recover the original variables). """ init_constraints = [normalize_ml_pred(c) for c in init_constraints] final_constraints = [normalize_ml_pred(c) for c in final_constraints] @@ -785,17 +827,17 @@ def _replace_rewrites_with_implies(_kast: KInner) -> KInner: def no_cell_rewrite_to_dots(term: KInner) -> KInner: - """ - Transforms a given term by replacing the contents of each cell with dots if the LHS and RHS are the same. + """Transform a given term by replacing the contents of each cell with dots if the LHS and RHS are the same. - This function recursively traverses the cells in a term. When it finds a cell whose left-hand side (LHS) is identical to its right-hand side (RHS), + This function recursively traverses the cells in a term. + When it finds a cell whose left-hand side (LHS) is identical to its right-hand side (RHS), it replaces the cell's contents with a predefined DOTS. - Parameters: - - term (KInner): The term to be transformed. + Args: + term: The term to be transformed. Returns: - - KInner: The transformed term, where specific cell contents have been replaced with dots. + The transformed term, where specific cell contents have been replaced with dots. """ def _no_cell_rewrite_to_dots(_term: KInner) -> KInner: diff --git a/pyk/src/pyk/kast/outer.py b/pyk/src/pyk/kast/outer.py index 1cd84118ab1..f6c6dfa7a07 100644 --- a/pyk/src/pyk/kast/outer.py +++ b/pyk/src/pyk/kast/outer.py @@ -293,11 +293,11 @@ def argument_sorts(self) -> list[KSort]: @cached_property def is_prefix(self) -> bool: - """ - The production is of the form `t* "(" (n ("," n)*)? ")"`. + """The production is of the form ``t* "(" (n ("," n)*)? ")"``. + + Here, ``t`` is a terminal other than ``"("``, ``","`` or ``")"``, and ``n`` a non-terminal. - Here, `t` is a terminal other than `"("`, `","` or `")"`, and `n` a non-terminal. 
-        Example: `Int ::= "mul" "(" Int "," Int ")"`
+        Example: ``syntax Int ::= "mul" "(" Int "," Int ")"``
         """

         def encode(item: KProductionItem) -> str:
@@ -319,7 +319,7 @@ def encode(item: KProductionItem) -> str:

     @cached_property
     def is_record(self) -> bool:
-        """The production is prefix with labelled nonterminals"""
+        """The production is prefix with labelled nonterminals."""
         return bool(self.is_prefix and self.non_terminals and all(item.name is not None for item in self.non_terminals))

     @property
@@ -1156,7 +1156,7 @@ def sentence_by_unique_id(self) -> dict[str, KSentence]:
         return unique_id_map

     def production_for_cell_sort(self, sort: KSort) -> KProduction:
-        """Returns the production for a given cell-declaration syntax from the cell's declared sort."""
+        """Return the production for a given cell-declaration syntax from the cell's declared sort."""
         # Typical cell production has 3 productions:
         #     syntax KCell ::= "project:KCell" "(" K ")" [function, projection]
         #     syntax KCell ::= "initKCell" "(" Map ")" [function, initializer, noThread]
@@ -1174,7 +1174,7 @@ def production_for_cell_sort(self, sort: KSort) -> KProduction:
             raise ValueError(f'Expected a single cell production for sort {sort}') from err

     def module(self, name: str) -> KFlatModule:
-        """Returns the module associated with a given name."""
+        """Return the module associated with a given name."""
         return self.all_modules_dict[name]

     @cached_property
@@ -1302,7 +1302,7 @@ def insert(dct: dict[str, set[str]], *, key: str, value: str) -> dict[str, set[s
         return reduce(lambda res, pair: insert(res, key=pair[0], value=pair[1]), pairs, {})

     def sort(self, kast: KInner) -> KSort | None:
-        """Computes the sort of a given term using best-effort simple sorting algorithm, returns `None` on algorithm failure."""
+        """Compute the sort of a given term using a best-effort, simple sorting algorithm, returning `None` on algorithm failure."""
         match kast:
             case KToken(_, sort) | KVariable(_, sort):
                 return sort
@@ -1338,7 +1338,7 @@ def resolve(sort: KSort) -> KSort:
         return resolve(prod.sort), tuple(resolve(sort) for sort in prod.argument_sorts)

     def least_common_supersort(self, sort1: KSort, sort2: KSort) -> KSort | None:
-        """Computes the lowest-upper-bound of two sorts in the sort lattice using very simple approach, returning `None` on failure (not necessarily meaning there isn't a lub)."""
+        """Compute the least-upper-bound of two sorts in the sort lattice using a very simple approach, returning `None` on failure (not necessarily meaning there isn't a lub)."""
         if sort1 == sort2:
             return sort1
         if sort1 in self.subsorts(sort2):
@@ -1350,7 +1350,7 @@ def least_common_supersort(self, sort1: KSort, sort2: KSort) -> KSort | None:
         return None

     def greatest_common_subsort(self, sort1: KSort, sort2: KSort) -> KSort | None:
-        """Computes the greatest-lower-bound of two sorts in the sort lattice using very simple approach, returning `None` on failure (not necessarily meaning there isn't a glb)."""
+        """Compute the greatest-lower-bound of two sorts in the sort lattice using a very simple approach, returning `None` on failure (not necessarily meaning there isn't a glb)."""
         if sort1 == sort2:
             return sort1
         if sort1 in self.subsorts(sort2):
@@ -1382,7 +1382,6 @@ def _add_ksequence_under_k_productions(_kast: KInner) -> KInner:

     def sort_vars(self, kast: KInner, sort: KSort | None = None) -> KInner:
         """Return the original term with all the variables having their sorts added or specialized, failing if receiving conflicting sorts for a given variable."""
-
         if type(kast) is KVariable and 
kast.sort is None and sort is not None: return kast.let(sort=sort) @@ -1487,7 +1486,6 @@ def _add_sort_params(_k: KInner) -> KInner: def add_cell_map_items(self, kast: KInner) -> KInner: """Wrap cell-map items in the syntactical wrapper that the frontend generates for them (see `KDefinition.remove_cell_map_items`).""" - # example: # syntax AccountCellMap [cellCollection, hook(MAP.Map)] # syntax AccountCellMap ::= AccountCellMap AccountCellMap [assoc, avoid, cellCollection, comm, element(AccountCellMapItem), function, hook(MAP.concat), unit(.AccountCellMap), wrapElement()] @@ -1508,7 +1506,6 @@ def _wrap_elements(_k: KInner) -> KInner: def remove_cell_map_items(self, kast: KInner) -> KInner: """Remove cell-map syntactical wrapper items that the frontend generates (see `KDefinition.add_cell_map_items`).""" - # example: # syntax AccountCellMap [cellCollection, hook(MAP.Map)] # syntax AccountCellMap ::= AccountCellMap AccountCellMap [assoc, avoid, cellCollection, comm, element(AccountCellMapItem), function, hook(MAP.concat), unit(.AccountCellMap), wrapElement()] @@ -1533,7 +1530,6 @@ def _wrap_elements(_k: KInner) -> KInner: def empty_config(self, sort: KSort) -> KInner: """Given a cell-sort, compute an "empty" configuration for it (all the constructor structure of the configuration in place, but variables in cell positions).""" - if sort not in self._empty_config: self._empty_config[sort] = self._compute_empty_config(sort) return self._empty_config[sort] @@ -1581,7 +1577,6 @@ def _cell_vars_to_labels(_kast: KInner) -> KInner: def init_config(self, sort: KSort) -> KInner: """Return an initialized configuration as the user declares it in the semantics, complete with configuration variables in place.""" - if sort not in self._init_config: self._init_config[sort] = self._compute_init_config(sort) return self._init_config[sort] diff --git a/pyk/src/pyk/kast/outer_lexer.py b/pyk/src/pyk/kast/outer_lexer.py index ecb43ffffb3..ec8d12ae3f3 100644 --- a/pyk/src/pyk/kast/outer_lexer.py +++ b/pyk/src/pyk/kast/outer_lexer.py @@ -191,7 +191,7 @@ class State(Enum): class LocationIterator(Iterator[str]): - """A string iterator which tracks the line and column information of the characters in the string""" + """A string iterator which tracks the line and column information of the characters in the string.""" _line: int _col: int @@ -215,7 +215,7 @@ def __next__(self) -> str: @property def loc(self) -> Loc: - """Returns the line,column of the last character returned by the iterator + """Return the ``(line, column)`` of the last character returned by the iterator. If no character has been returned yet, it will be the location that this iterator was initialized with. The default is (1,0), which is the only @@ -523,8 +523,10 @@ def _hash_upper_id(la: str, it: Iterator[str]) -> tuple[str, TokenType, str]: def _modname(la: str, it: LocationIterator) -> tuple[Token, str]: - r"""[a-zA-Z]\w*(-\w+)*""" + r"""Match a module name. + Corresponds to regex: [a-zA-Z]\w*(-\w+)* + """ la = _skip_ws_and_comments(la, it) consumed = [] @@ -888,18 +890,21 @@ def _attr_content(la: str, it: Iterator[str]) -> tuple[str, str]: def _maybe_comment(la: str, it: Iterator[str]) -> tuple[bool, list[str], str]: - """ - Attempt to consume a line or block comment from the iterator. - Expects la to be '/'. - - :param la: The current lookahead. - :param it: The iterator. 
- :return: A tuple `(success, consumed, la)` where - * `success` indicates whether `consumed` is a comment - * `consumed` is the list of consumed characters - * `la` is the current lookahead - """ + """Attempt to consume a line or block comment from the iterator. + + Expects la to be ``'/'``. + Args: + la: The current lookahead. + it: The iterator. + + Returns: + A tuple ``(success, consumed, la)`` where + + - ``success``: Indicates whether `consumed` is a comment. + - ``consumed``: The list of consumed characters. + - ``la``: The current lookahead. + """ assert la == '/' consumed = [la] # ['/'] diff --git a/pyk/src/pyk/kast/pretty.py b/pyk/src/pyk/kast/pretty.py index 13fedd04036..7ff0e17c017 100644 --- a/pyk/src/pyk/kast/pretty.py +++ b/pyk/src/pyk/kast/pretty.py @@ -78,8 +78,12 @@ def symbol_table(self) -> SymbolTable: def print(self, kast: KAst) -> str: """Print out KAST terms/outer syntax. - - Input: KAST term. - - Output: Best-effort string representation of KAST term. + + Args: + kast: KAST term to print. + + Returns: + Best-effort string representation of KAST term. """ _LOGGER.debug(f'Unparsing: {kast}') if type(kast) is KAtt: @@ -321,8 +325,11 @@ def _print_kdefinition(self, kdefinition: KDefinition) -> str: def _print_kast_bool(self, kast: KAst) -> str: """Print out KAST requires/ensures clause. - - Input: KAST Bool for requires/ensures clause. - - Output: Best-effort string representation of KAST term. + Args: + kast: KAST Bool for requires/ensures clause. + + Returns: + Best-effort string representation of KAST term. """ _LOGGER.debug(f'_print_kast_bool: {kast}') if type(kast) is KApply and kast.label.name in ['_andBool_', '_orBool_']: @@ -356,8 +363,11 @@ def build_symbol_table( ) -> SymbolTable: """Build the unparsing symbol table given a JSON encoded definition. - - Input: JSON encoded K definition. - - Return: Python dictionary mapping klabels to automatically generated unparsers. + Args: + definition: JSON encoded K definition. + + Returns: + Python dictionary mapping klabels to automatically generated unparsers. """ symbol_table = {} all_modules = list(definition.all_modules) + ([] if extra_modules is None else list(extra_modules)) diff --git a/pyk/src/pyk/kcfg/kcfg.py b/pyk/src/pyk/kcfg/kcfg.py index 3d0345d1e1d..f8c4dfe479b 100644 --- a/pyk/src/pyk/kcfg/kcfg.py +++ b/pyk/src/pyk/kcfg/kcfg.py @@ -930,16 +930,14 @@ def split_on_constraints(self, source_id: NodeIdLike, constraints: Iterable[KInn def lift_edge(self, b_id: NodeIdLike) -> None: """Lift an edge up another edge directly preceding it. - Input: - - b_id: the identifier of the central node `B` of a sequence of edges `A --> B --> C`. + `A --M steps--> B --N steps--> C` becomes `A --(M + N) steps--> C`. Node `B` is removed. - Effect: `A --M steps--> B --N steps--> C` becomes `A --(M + N) steps--> C`. Node `B` is removed. + Args: + b_id: the identifier of the central node `B` of a sequence of edges `A --> B --> C`. - Output: None - - Raises: An `AssertionError` if the edges in question are not in place. + Raises: + AssertionError: If the edges in question are not in place. """ - # Obtain edges `A -> B`, `B -> C` a_to_b = single(self.edges(target_id=b_id)) b_to_c = single(self.edges(source_id=b_id)) @@ -951,18 +949,15 @@ def lift_edge(self, b_id: NodeIdLike) -> None: def lift_edges(self) -> bool: """Perform all possible edge lifts across the KCFG. + The KCFG is transformed to an equivalent in which no further edge lifts are possible. 
+ Given the KCFG design, it is not possible for one edge lift to either disallow another or allow another that was not previously possible. Therefore, this function is guaranteed to lift all possible edges without having to loop. - Input: None - - Effect: The KCFG is transformed to an equivalent in which no further edge lifts are possible. - - Output: - - bool: An indicator of whether or not at least one edge lift was performed. + Returns: + An indicator of whether or not at least one edge lift was performed. """ - edges_to_lift = sorted( [ node.id @@ -977,21 +972,17 @@ def lift_edges(self) -> bool: def lift_split_edge(self, b_id: NodeIdLike) -> None: """Lift a split up an edge directly preceding it. - Input: - - b_id: the identifier of the central node `B` of the structure `A --> B --> [C_1, ..., C_N]`. - - Effect: - `A --M steps--> B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]` becomes - `A --[cond_1, ..., cond_N]--> [A #And cond_1 --M steps--> C_1, ..., A #And cond_N --M steps--> C_N]`. - Node `B` is removed. + `A --M steps--> B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]` becomes + `A --[cond_1, ..., cond_N]--> [A #And cond_1 --M steps--> C_1, ..., A #And cond_N --M steps--> C_N]`. + Node `B` is removed. - Output: None + Args: + b_id: The identifier of the central node `B` of the structure `A --> B --> [C_1, ..., C_N]`. Raises: - - `AssertionError`, if the structure in question is not in place. - - `AssertionError`, if any of the `cond_i` contain variables not present in `A`. + AssertionError: If the structure in question is not in place. + AssertionError: If any of the `cond_i` contain variables not present in `A`. """ - # Obtain edge `A -> B`, split `[cond_I, C_I | I = 1..N ]` a_to_b = single(self.edges(target_id=b_id)) a = a_to_b.source @@ -1024,19 +1015,16 @@ def lift_split_edge(self, b_id: NodeIdLike) -> None: def lift_split_split(self, b_id: NodeIdLike) -> None: """Lift a split up a split directly preceding it, joining them into a single split. - Input: - - b_id: the identifier of the node `B` of the structure - `A --[..., cond_B, ...]--> [..., B, ...]` with `B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]` - - Effect: - `A --[..., cond_B, ...]--> [..., B, ...]` with `B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]` becomes - `A --[..., cond_B #And cond_1, ..., cond_B #And cond_N, ...]--> [..., C_1, ..., C_N, ...]`. - Node `B` is removed. + `A --[..., cond_B, ...]--> [..., B, ...]` with `B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]` becomes + `A --[..., cond_B #And cond_1, ..., cond_B #And cond_N, ...]--> [..., C_1, ..., C_N, ...]`. + Node `B` is removed. - Output: None + Args: + b_id: the identifier of the node `B` of the structure + `A --[..., cond_B, ...]--> [..., B, ...]` with `B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]`. Raises: - - `AssertionError`, if the structure in question is not in place. + AssertionError: If the structure in question is not in place. """ # Obtain splits `A --[..., cond_B, ...]--> [..., B, ...]` and # `B --[cond_1, ..., cond_N]--> [C_1, ..., C_N]-> [C_1, ..., C_N]` @@ -1072,12 +1060,10 @@ def lift_split_split(self, b_id: NodeIdLike) -> None: def lift_splits(self) -> bool: """Perform all possible split liftings. - Input: None + The KCFG is transformed to an equivalent in which no further split lifts are possible. - Effect: The KCFG is transformed to an equivalent in which no further split lifts are possible. - - Output: - - bool: An indicator of whether or not at least one split lift was performed. 
+ Returns: + An indicator of whether or not at least one split lift was performed. """ def _lift_split(finder: Callable, lifter: Callable) -> bool: @@ -1110,15 +1096,9 @@ def _fold_lift(result: bool, finder_lifter: tuple[Callable, Callable]) -> bool: def minimize(self) -> None: """Minimize KCFG by repeatedly performing the lifting transformations. + The KCFG is transformed to an equivalent in which no further lifting transformations are possible. The loop is designed so that each transformation is performed once in each iteration. - - Input: None - - Effect: The KCFG is transformed to an equivalent in which no further lifting transformations are possible. - - Output: None """ - repeat = True while repeat: repeat = self.lift_edges() diff --git a/pyk/src/pyk/kcfg/show.py b/pyk/src/pyk/kcfg/show.py index bc463daec52..3069450bfa3 100644 --- a/pyk/src/pyk/kcfg/show.py +++ b/pyk/src/pyk/kcfg/show.py @@ -123,7 +123,6 @@ def pretty_segments(self, kcfg: KCFG, minimize: bool = True) -> Iterable[tuple[s The identifier tells you whether that segment is for a given node, edge, or just pretty spacing ('unknown'). This is useful for applications which want to pretty print in chunks, so that they can know which printed region corresponds to each node/edge. """ - processed_nodes: list[KCFG.Node] = [] ret_lines: list[tuple[str, list[str]]] = [] diff --git a/pyk/src/pyk/kllvm/hints/prooftrace.py b/pyk/src/pyk/kllvm/hints/prooftrace.py index d0a02aabf5b..29a2cb4bfef 100644 --- a/pyk/src/pyk/kllvm/hints/prooftrace.py +++ b/pyk/src/pyk/kllvm/hints/prooftrace.py @@ -31,15 +31,11 @@ class LLVMStepEvent(ABC): - """ - Abstract base class representing an LLVM step event. - """ + """Abstract base class representing an LLVM step event.""" class LLVMRewriteEvent(LLVMStepEvent): - """ - Represents LLVM rewrite event. - """ + """Represents LLVM rewrite event.""" @property @abstractmethod @@ -59,8 +55,7 @@ def substitution(self) -> dict[str, Pattern]: @final class LLVMRuleEvent(LLVMRewriteEvent): - """ - Represents an LLVM rule event. + """Represents an LLVM rule event. Attributes: _rule_event (llvm_rule_event): The underlying LLVM rule event. @@ -69,8 +64,7 @@ class LLVMRuleEvent(LLVMRewriteEvent): _rule_event: llvm_rule_event def __init__(self, rule_event: llvm_rule_event) -> None: - """ - Initializes a new instance of the LLVMRuleEvent class. + """Initialize a new instance of the LLVMRuleEvent class. Args: rule_event (llvm_rule_event): The LLVM rule event object. @@ -78,7 +72,8 @@ def __init__(self, rule_event: llvm_rule_event) -> None: self._rule_event = rule_event def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMRuleEvent object using the AST printing method. """ @@ -97,8 +92,7 @@ def substitution(self) -> dict[str, Pattern]: @final class LLVMSideConditionEventEnter(LLVMRewriteEvent): - """ - Represents an event that enters a side condition in LLVM rewriting. + """Represents an event that enters a side condition in LLVM rewriting. This event is used to check the side condition of a rule. Mostly used in ensures/requires clauses. @@ -109,8 +103,7 @@ class LLVMSideConditionEventEnter(LLVMRewriteEvent): _side_condition_event: llvm_side_condition_event def __init__(self, side_condition_event: llvm_side_condition_event) -> None: - """ - Initializes a new instance of the LLVMSideConditionEventEnter class. + """Initialize a new instance of the LLVMSideConditionEventEnter class. 
Args: side_condition_event (llvm_side_condition_event): The LLVM side condition event object. @@ -118,7 +111,8 @@ def __init__(self, side_condition_event: llvm_side_condition_event) -> None: self._side_condition_event = side_condition_event def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMSideConditionEventEnter object using the AST printing method. """ @@ -137,8 +131,7 @@ def substitution(self) -> dict[str, Pattern]: @final class LLVMSideConditionEventExit(LLVMStepEvent): - """ - Represents an LLVM side condition event indicating the exit of a side condition. + """Represents an LLVM side condition event indicating the exit of a side condition. This event contains the result of the side condition evaluation. @@ -149,8 +142,7 @@ class LLVMSideConditionEventExit(LLVMStepEvent): _side_condition_end_event: llvm_side_condition_end_event def __init__(self, side_condition_end_event: llvm_side_condition_end_event) -> None: - """ - Initializes a new instance of the LLVMSideConditionEventExit class. + """Initialize a new instance of the LLVMSideConditionEventExit class. Args: side_condition_end_event (llvm_side_condition_end_event): The LLVM side condition end event object. @@ -158,7 +150,8 @@ def __init__(self, side_condition_end_event: llvm_side_condition_end_event) -> N self._side_condition_end_event = side_condition_end_event def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMSideConditionEventExit object using the AST printing method. """ @@ -166,19 +159,18 @@ def __repr__(self) -> str: @property def rule_ordinal(self) -> int: - """Returns the axiom ordinal number associated with the side condition event.""" + """Return the axiom ordinal number associated with the side condition event.""" return self._side_condition_end_event.rule_ordinal @property def check_result(self) -> bool: - """Returns the boolean result of the evaluation of the side condition that corresponds to this event.""" + """Return the boolean result of the evaluation of the side condition that corresponds to this event.""" return self._side_condition_end_event.check_result @final class LLVMFunctionEvent(LLVMStepEvent): - """ - Represents an LLVM function event in a proof trace. + """Represent an LLVM function event in a proof trace. Attributes: _function_event (llvm_function_event): The underlying LLVM function event object. @@ -187,8 +179,7 @@ class LLVMFunctionEvent(LLVMStepEvent): _function_event: llvm_function_event def __init__(self, function_event: llvm_function_event) -> None: - """ - Initializes a new instance of the LLVMFunctionEvent class. + """Initialize a new instance of the LLVMFunctionEvent class. Args: function_event (llvm_function_event): The LLVM function event object. @@ -196,7 +187,8 @@ def __init__(self, function_event: llvm_function_event) -> None: self._function_event = function_event def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMFunctionEvent object using the AST printing method. """ @@ -204,24 +196,23 @@ def __repr__(self) -> str: @property def name(self) -> str: - """Returns the name of the LLVM function as a KORE Symbol Name.""" + """Return the name of the LLVM function as a KORE Symbol Name.""" return self._function_event.name @property def relative_position(self) -> str: - """Returns the relative position of the LLVM function event in the proof trace. 
Ex.: (0:0:0:0)""" + """Return the relative position of the LLVM function event in the proof trace. Ex.: (0:0:0:0).""" return self._function_event.relative_position @property def args(self) -> list[LLVMArgument]: - """Returns a list of LLVMArgument objects representing the arguments of the LLVM function.""" + """Return a list of LLVMArgument objects representing the arguments of the LLVM function.""" return [LLVMArgument(arg) for arg in self._function_event.args] @final class LLVMHookEvent(LLVMStepEvent): - """ - Represents a hook event in LLVM execution. + """Represents a hook event in LLVM execution. Attributes: _hook_event (llvm_hook_event): The underlying hook event object. @@ -230,8 +221,7 @@ class LLVMHookEvent(LLVMStepEvent): _hook_event: llvm_hook_event def __init__(self, hook_event: llvm_hook_event) -> None: - """ - Initializes a new instance of the LLVMHookEvent class. + """Initialize a new instance of the LLVMHookEvent class. Args: hook_event (llvm_hook_event): The LLVM hook event object. @@ -239,7 +229,8 @@ def __init__(self, hook_event: llvm_hook_event) -> None: self._hook_event = hook_event def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMHookEvent object using the AST printing method. """ @@ -247,29 +238,28 @@ def __repr__(self) -> str: @property def name(self) -> str: - """Returns the attribute name of the hook event. Ex.: "INT.add" """ + """Return the attribute name of the hook event. Ex.: "INT.add".""" return self._hook_event.name @property def relative_position(self) -> str: - """Returns the relative position of the hook event in the proof trace. Ex.: (0:0:0:0)""" + """Return the relative position of the hook event in the proof trace. Ex.: (0:0:0:0).""" return self._hook_event.relative_position @property def args(self) -> list[LLVMArgument]: - """Returns a list of LLVMArgument objects representing the arguments of the hook event.""" + """Return a list of LLVMArgument objects representing the arguments of the hook event.""" return [LLVMArgument(arg) for arg in self._hook_event.args] @property def result(self) -> Pattern: - """Returns the result pattern of the hook event evaluation.""" + """Return the result pattern of the hook event evaluation.""" return self._hook_event.result @final class LLVMArgument: - """ - Represents an LLVM argument. + """Represents an LLVM argument. Attributes: _argument (Argument): The underlying Argument object. An argument is a wrapper object containing either a step @@ -279,8 +269,7 @@ class LLVMArgument: _argument: Argument def __init__(self, argument: Argument) -> None: - """ - Initializes the LLVMArgument object. + """Initialize the LLVMArgument object. Args: argument (Argument): The Argument object. @@ -288,7 +277,8 @@ def __init__(self, argument: Argument) -> None: self._argument = argument def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: Returns a string representation of the LLVMArgument object using the AST printing method. 
""" @@ -312,23 +302,22 @@ def step_event(self) -> LLVMStepEvent: @property def kore_pattern(self) -> Pattern: - """Returns the KORE Pattern associated with the argument if any.""" + """Return the KORE Pattern associated with the argument if any.""" assert isinstance(self._argument.kore_pattern, Pattern) return self._argument.kore_pattern def is_kore_pattern(self) -> bool: - """Checks if the argument is a KORE Pattern.""" + """Check if the argument is a KORE Pattern.""" return self._argument.is_kore_pattern() def is_step_event(self) -> bool: - """Checks if the argument is a step event.""" + """Check if the argument is a step event.""" return self._argument.is_step_event() @final class LLVMRewriteTrace: - """ - Represents an LLVM rewrite trace. + """Represents an LLVM rewrite trace. Attributes: _rewrite_trace (llvm_rewrite_trace): The underlying LLVM rewrite trace object. @@ -337,8 +326,7 @@ class LLVMRewriteTrace: _rewrite_trace: llvm_rewrite_trace def __init__(self, rewrite_trace: llvm_rewrite_trace) -> None: - """ - Initializes a new instance of the LLVMRewriteTrace class. + """Initialize a new instance of the LLVMRewriteTrace class. Args: rewrite_trace (llvm_rewrite_trace): The LLVM rewrite trace object. @@ -346,7 +334,8 @@ def __init__(self, rewrite_trace: llvm_rewrite_trace) -> None: self._rewrite_trace = rewrite_trace def __repr__(self) -> str: - """ + """Return a string representation of the object. + Returns: A string representation of the LLVMRewriteTrace object using the AST printing method. """ @@ -369,20 +358,23 @@ def initial_config(self) -> LLVMArgument: @property def trace(self) -> list[LLVMArgument]: - """Returns a list of events that occurred after the initial configurarion was constructed until the end of the - proof trace when the final configuration is reached.""" + """Returns the trace. + + The trace is the list of events that occurred after the initial configurarion was constructed until the end of the + proof trace when the final configuration is reached. + """ return [LLVMArgument(event) for event in self._rewrite_trace.trace] @staticmethod def parse(trace: bytes, header: KoreHeader) -> LLVMRewriteTrace: - """Parses the given proof hints byte string using the given kore_header object to create an LLVMRewriteTrace - object.""" + """Parse the given proof hints byte string using the given kore_header object.""" return LLVMRewriteTrace(llvm_rewrite_trace.parse(trace, header._kore_header)) class KoreHeader: - """ - Represents the Kore header, a file that contains the version of the Binary KORE used to serialize/deserialize the + """Represents the Kore header. + + The Kore header is a file that contains the version of the Binary KORE used to serialize/deserialize the Proof Trace and all the aditional information needed make this process faster the Proof Trace. Attributes: @@ -392,8 +384,7 @@ class KoreHeader: _kore_header: kore_header def __init__(self, kore_header: kore_header) -> None: - """ - Initializes a new instance of the KoreHeader class. + """Initialize a new instance of the KoreHeader class. Args: kore_header (kore_header): The KORE Header object. @@ -402,14 +393,14 @@ def __init__(self, kore_header: kore_header) -> None: @staticmethod def create(header_path: Path) -> KoreHeader: - """Creates a new KoreHeader object from the given header file path.""" + """Create a new KoreHeader object from the given header file path.""" return KoreHeader(kore_header(str(header_path))) class LLVMEventType: - """ - Represents an LLVM event type. 
This works as a wrapper around the EventType enum.
+ """Represents an LLVM event type.
+
 This works as a wrapper around the EventType enum.
 It also provides properties to check the type of the event.

 Attributes:
@@ -419,8 +410,7 @@

 _event_type: EventType

 def __init__(self, event_type: EventType) -> None:
- """
- Initializes a new instance of the LLVMEventType class.
+ """Initialize a new instance of the LLVMEventType class.

 Args:
 event_type (EventType): The EventType object.
@@ -444,8 +434,7 @@ def is_trace(self) -> bool:


 class LLVMEventAnnotated:
- """
- Represents an annotated LLVM event.
+ """Represents an annotated LLVM event.

 This class is used to wrap an llvm_event and its corresponding event type.
 This class is used to iterate over the LLVM rewrite trace events.

 Attributes:
@@ -457,7 +446,7 @@ class LLVMEventAnnotated:

 _annotated_llvm_event: annotated_llvm_event

 def __init__(self, annotated_llvm_event: annotated_llvm_event) -> None:
- """Initializes a new instance of the LLVMEventAnnotated class.
+ """Initialize a new instance of the LLVMEventAnnotated class.

 Args:
 annotated_llvm_event (annotated_llvm_event): The annotated LLVM event object.
@@ -476,8 +465,7 @@ def event(self) -> LLVMArgument:


 class LLVMRewriteTraceIterator:
- """
- Represents an LLVM rewrite trace iterator.
+ """Represents an LLVM rewrite trace iterator.

 This class is used to iterate over the LLVM rewrite trace events in the stream parser.

@@ -488,8 +476,7 @@ class LLVMRewriteTraceIterator:

 _rewrite_trace_iterator: llvm_rewrite_trace_iterator

 def __init__(self, rewrite_trace_iterator: llvm_rewrite_trace_iterator) -> None:
- """
- Initializes a new instance of the LLVMRewriteTraceIterator class.
+ """Initialize a new instance of the LLVMRewriteTraceIterator class.

 Args:
 rewrite_trace_iterator (llvm_rewrite_trace_iterator): The LLVM rewrite trace iterator object.
@@ -497,23 +484,21 @@ def __init__(self, rewrite_trace_iterator: llvm_rewrite_trace_iterator) -> None:
 self._rewrite_trace_iterator = rewrite_trace_iterator

 def __repr__(self) -> str:
- """
+ """Return a string representation of the object.
+
 Returns:
 A string representation of the LLVMRewriteTraceIterator object using the AST printing method.
 """
 return self._rewrite_trace_iterator.__repr__()

 def __iter__(self) -> Generator[LLVMEventAnnotated, None, None]:
- """Yields LLVMEventAnnotated options.
+ """Yield LLVMEventAnnotated objects.

 This method is an iterator that yields LLVMEventAnnotated objects. It iterates over the events in the trace
 and returns the next event as an LLVMEventAnnotated object.

 Yields:
 LLVMEventAnnotated: The next LLVMEventAnnotated object.
-
- Raises:
- None
 """
 while True:
 next_event = self._rewrite_trace_iterator.get_next_event()
@@ -523,7 +508,7 @@ def __iter__(self) -> Generator[LLVMEventAnnotated, None, None]:
 yield LLVMEventAnnotated(next_event)

 def __next__(self) -> LLVMEventAnnotated:
- """Yields the next LLVMEventAnnotated object from the iterator.
+ """Return the next LLVMEventAnnotated object from the iterator.

 Returns:
 LLVMEventAnnotated: The next LLVMEventAnnotated object.
@@ -539,10 +524,10 @@ def __next__(self) -> LLVMEventAnnotated: @property def version(self) -> int: - """Returns the version of the HINTS format.""" + """Return the version of the HINTS format.""" return self._rewrite_trace_iterator.version @staticmethod def from_file(trace_path: Path, header: KoreHeader) -> LLVMRewriteTraceIterator: - """Creates a new LLVMRewriteTraceIterator object from the given trace and header file paths.""" + """Create a new LLVMRewriteTraceIterator object from the given trace and header file paths.""" return LLVMRewriteTraceIterator(llvm_rewrite_trace_iterator.from_file(str(trace_path), header._kore_header)) diff --git a/pyk/src/pyk/konvert/_module_to_kore.py b/pyk/src/pyk/konvert/_module_to_kore.py index 2fb1519425b..3b5d65432b9 100644 --- a/pyk/src/pyk/konvert/_module_to_kore.py +++ b/pyk/src/pyk/konvert/_module_to_kore.py @@ -85,7 +85,6 @@ def module_to_kore(definition: KDefinition) -> Module: """Convert the main module of a kompiled KAST definition to KORE format.""" - module = simplified_module(definition) defn = KDefinition(module.name, (module,)) # for getting the sort lattice @@ -686,7 +685,8 @@ def axiom(overloaded: KProduction, overloader: KProduction) -> Axiom: def simplified_module(definition: KDefinition, module_name: str | None = None) -> KFlatModule: - """ + """Perform a series of simplification steps on a module. + In ModuleToKORE.java, there are some implicit KAST-to-KAST kompilation steps hidden in the conversion. In particular, the kompiled KAST definition (compiled.json) is modular, whereas the kompiled definition diff --git a/pyk/src/pyk/kore/rpc.py b/pyk/src/pyk/kore/rpc.py index 4e309eec4d3..c23dbf7bd61 100644 --- a/pyk/src/pyk/kore/rpc.py +++ b/pyk/src/pyk/kore/rpc.py @@ -1220,7 +1220,7 @@ def _cli_args(self) -> list[str]: return res def _extra_args(self) -> list[str]: - """Command line arguments that are intended to be included in the bug report""" + """Command line arguments that are intended to be included in the bug report.""" smt_server_args = [] if self._smt_timeout: smt_server_args += ['--smt-timeout', str(self._smt_timeout)] diff --git a/pyk/src/pyk/kore/syntax.py b/pyk/src/pyk/kore/syntax.py index 3c94561191e..6d112e60a7b 100644 --- a/pyk/src/pyk/kore/syntax.py +++ b/pyk/src/pyk/kore/syntax.py @@ -561,9 +561,9 @@ def sorts(self) -> tuple[Sort, ...]: ... @property def ctor_patterns(self) -> tuple[Pattern, ...]: - """ - Patterns used to construct the term with `of`. - Except for `Assoc`, `DV`, `MLFixpoint` and `MLQuant` it coincides with `patterns`. + """Return patterns used to construct the term with `of`. + + Except for `Assoc`, `DV`, `MLFixpoint` and `MLQuant` this coincides with `patterns`. """ return self.patterns diff --git a/pyk/src/pyk/kore_exec_covr/__main__.py b/pyk/src/pyk/kore_exec_covr/__main__.py index 5284cf26e16..cf6d692d84f 100644 --- a/pyk/src/pyk/kore_exec_covr/__main__.py +++ b/pyk/src/pyk/kore_exec_covr/__main__.py @@ -20,14 +20,16 @@ def do_analyze(definition_dir: Path, input_file: Path) -> None: - """ - Inputs: - * definition compiled with "kompile --backend haskell --emit-json" - * Log file produced with "KORE_EXEC_OPTS='--log-format oneline --log-entries DebugAppliedRewriteRules,DebugApplyEquation'" + """Analyze log file. + + Args: + definition_dir: Definition kompiled with ``kompile --backend haskell --emit-json``. + input_file: Log file produced with + ``KORE_EXEC_OPTS='--log-format oneline --log-entries DebugAppliedRewriteRules,DebugApplyEquation'``. 
- Outputs:
- * human-readable report of applied rewrite and simplification rules,
- with labels (if declared) and locations
+ Prints a human-readable report of applied rewrite and simplification rules,
+ with labels (if declared) and locations.
 """
 definition = read_kast_definition(definition_dir / 'compiled.json')
diff --git a/pyk/src/pyk/kore_exec_covr/kore_exec_covr.py b/pyk/src/pyk/kore_exec_covr/kore_exec_covr.py
index 1869af12056..8e1995db6a5 100644
--- a/pyk/src/pyk/kore_exec_covr/kore_exec_covr.py
+++ b/pyk/src/pyk/kore_exec_covr/kore_exec_covr.py
@@ -27,14 +27,17 @@ class HaskellLogEntry(Enum):


 def parse_rule_applications(haskell_backend_oneline_log_file: Path) -> dict[HaskellLogEntry, dict[str, int]]:
- """
- Traverse a one-line log file produced by K's Haskell backend and extract information about:
- * applied rewrites (DebugAppliedRewriteRules)
- * applied simplifications (DEBUG_APPLY_EQUATION)
+ """Process a one-line log file produced by K's Haskell backend.
+
+ Extracts information about:
+
+ - Applied rewrites (DebugAppliedRewriteRules).
+ - Applied simplifications (DEBUG_APPLY_EQUATION).

- Note: Haskell backend logs often contain rule applications with empty locations.
- It seems likely that those are generated projection rules.
- We report their applications in bulk with UNKNOWN location.
+ Note:
+ Haskell backend logs often contain rule applications with empty locations.
+ It seems likely that those are generated projection rules.
+ We report their applications in bulk with UNKNOWN location.
 """
 rewrites: dict[str, int] = defaultdict(int)
 simplifications: dict[str, int] = defaultdict(int)
@@ -61,7 +64,7 @@ def parse_rule_applications(haskell_backend_oneline_log_file: Path) -> dict[Hask


 def _parse_haskell_oneline_log(log_entry: str) -> tuple[HaskellLogEntry, str] | None:
- """Attempt to parse a one-line log string emmitted by K's Haskell backend"""
+ """Attempt to parse a one-line log string emitted by K's Haskell backend."""
 matches = _HASKELL_LOG_ENTRY_REGEXP.match(log_entry)
 try:
 assert matches
@@ -75,9 +78,7 @@ def _parse_haskell_oneline_log(log_entry: str) -> tuple[HaskellLogEntry, str] | 
 def build_rule_dict(
 definition: KDefinition, *, skip_projections: bool = True, skip_initializers: bool = True
 ) -> dict[str, KRule]:
- """
- Traverse the kompiled definition and build a dictionary mapping str(file:location) to KRule
- """
+ """Traverse the kompiled definition and build a dictionary mapping str(file:location) to KRule."""
 rule_dict: dict[str, KRule] = {}

 for rule in definition.rules:
diff --git a/pyk/src/pyk/krepl/repl.py b/pyk/src/pyk/krepl/repl.py
index 63a13f95455..4705250a7ed 100644
--- a/pyk/src/pyk/krepl/repl.py
+++ b/pyk/src/pyk/krepl/repl.py
@@ -118,12 +118,13 @@ def __init__(self) -> None:

 @abstractmethod
 def do_load(self, args: Any) -> bool | None:  # Leaky abstraction - make extension mechanism more robust
- """
- Abstract method to set up the interpreter.
+ """Set up the interpreter.
+
 Subclasses are expected to
- * decorate the method with `with_argparser` to ensure the right set of arguments is parsed;
- * instantiate an `Interpreter[T]` based on `args`, then set `self.interpreter`;
- * set `self.state` to `self.interpreter.init_state()`.
+
+ - Decorate the method with `with_argparser` to ensure the right set of arguments is parsed.
+ - Instantiate an `Interpreter[T]` based on `args`, then set `self.interpreter`.
+ - Set `self.state` to `self.interpreter.init_state()`.
 """
 ...
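For illustration, the `do_load` contract above might be implemented as in the following sketch. This is not part of the patch: it assumes the `BaseRepl` class from `pyk.krepl.repl`, and `MyInterpreter`, its state type, and the `program` argument are hypothetical stand-ins.

```python
from __future__ import annotations

from typing import Any

from cmd2 import Cmd2ArgumentParser, with_argparser

from pyk.krepl.repl import BaseRepl

_load_parser = Cmd2ArgumentParser()
_load_parser.add_argument('program', help='program to load')  # hypothetical argument


class MyInterpreter:
    """Stands in for an `Interpreter[T]` implementation (hypothetical)."""

    def __init__(self, program: str) -> None:
        self._program = program

    def init_state(self) -> str:
        # A real interpreter would construct an initial K state here.
        return self._program


class MyRepl(BaseRepl):
    @with_argparser(_load_parser)  # ensure the right set of arguments is parsed
    def do_load(self, args: Any) -> bool | None:
        self.interpreter = MyInterpreter(args.program)  # instantiate an interpreter based on `args`
        self.state = self.interpreter.init_state()  # initialize the REPL state from it
        return None
```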
diff --git a/pyk/src/pyk/ktool/kprove.py b/pyk/src/pyk/ktool/kprove.py index b9ff48dd328..195bee76536 100644 --- a/pyk/src/pyk/ktool/kprove.py +++ b/pyk/src/pyk/ktool/kprove.py @@ -519,18 +519,20 @@ def _resolve_depends(module_list: KFlatModuleList) -> KFlatModuleList: """Resolve each depends value relative to the module the claim belongs to. Example: - - module THIS-MODULE - claim ... [depends(foo,OTHER-MODULE.bar)] - endmodule - - becomes - - module THIS-MODULE - claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] - endmodule + ``` + module THIS-MODULE + claim ... [depends(foo,OTHER-MODULE.bar)] + endmodule + ``` + + becomes + + ``` + module THIS-MODULE + claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] + endmodule + ``` """ - labels = {claim.label for module in module_list.modules for claim in module.claims} def resolve_claim_depends(module_name: str, claim: KClaim) -> KClaim: diff --git a/pyk/src/pyk/ktool/krun.py b/pyk/src/pyk/ktool/krun.py index f3823a33723..43df462837a 100644 --- a/pyk/src/pyk/ktool/krun.py +++ b/pyk/src/pyk/ktool/krun.py @@ -302,16 +302,19 @@ def _build_arg_list( def llvm_interpret(definition_dir: str | Path, pattern: Pattern, *, depth: int | None = None) -> Pattern: - """ - Execute the `interpreter` binary generated by the LLVM Backend. + """Execute the `interpreter` binary generated by the LLVM Backend. - :param definition_dir: Path to the kompiled definition directory - :param pattern: KORE pattern to start rewriting from - :param depth: Maximal number of rewrite steps to take - :return: A pattern resulting from the rewrites - :raises RuntimeError: If the interpreter fails - """ + Args: + definition_dir: Path to the kompiled definition directory. + pattern: KORE pattern to start rewriting from. + depth: Maximal number of rewrite steps to take. + Returns: + The pattern resulting from the rewrites. + + Raises: + RuntimeError: If the interpreter fails. + """ definition_dir = Path(definition_dir) check_dir_path(definition_dir) diff --git a/pyk/src/pyk/proof/proof.py b/pyk/src/pyk/proof/proof.py index 0736da51a6e..b73fe0e1249 100644 --- a/pyk/src/pyk/proof/proof.py +++ b/pyk/src/pyk/proof/proof.py @@ -34,10 +34,12 @@ class ProofStatus(Enum): class Proof(Generic[PS, SR]): - """Abstract representation of a proof that can be executed in one or more discrete steps + """Abstract representation of a proof that can be executed in one or more discrete steps. - :param PS: Proof step: data required to perform a step of the proof. - :param SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof` + Generic type variables: + + - PS: Proof step: data required to perform a step of the proof. + - SR: Step result: data produced by executing a PS with ``Prover.step_proof`` used to update the `Proof`. """ _PROOF_TYPES: Final = {'APRProof', 'EqualityProof', 'RefutationProof'} @@ -115,9 +117,7 @@ def digest(self) -> str: @property def up_to_date(self) -> bool: - """ - Check that the proof's representation on disk is up-to-date. 
- """ + """Check that the proof's representation on disk is up-to-date.""" if self.proof_dir is None: raise ValueError(f'Cannot check if proof {self.id} with no proof_dir is up-to-date') proof_path = self.proof_dir / f'{hash_str(id)}.json' @@ -151,8 +151,7 @@ def remove_subproof(self, proof_id: str) -> None: def fetch_subproof( self, proof_id: str, force_reread: bool = False, uptodate_check_method: str = 'timestamp' ) -> Proof: - """Get a subproof, re-reading from disk if it's not up-to-date""" - + """Get a subproof, re-reading from disk if it's not up-to-date.""" if self.proof_dir is not None and (force_reread or not self._subproofs[proof_id].up_to_date): updated_subproof = Proof.read_proof(proof_id, self.proof_dir) self._subproofs[proof_id] = updated_subproof @@ -163,8 +162,7 @@ def fetch_subproof( def fetch_subproof_data( self, proof_id: str, force_reread: bool = False, uptodate_check_method: str = 'timestamp' ) -> Proof: - """Get a subproof, re-reading from disk if it's not up-to-date""" - + """Get a subproof, re-reading from disk if it's not up-to-date.""" if self.proof_dir is not None and (force_reread or not self._subproofs[proof_id].up_to_date): updated_subproof = Proof.read_proof_data(self.proof_dir, proof_id) self._subproofs[proof_id] = updated_subproof @@ -174,7 +172,7 @@ def fetch_subproof_data( @property def subproofs(self) -> Iterable[Proof]: - """Return the subproofs, re-reading from disk the ones that changed""" + """Return the subproofs, re-reading from disk the ones that changed.""" return self._subproofs.values() @property @@ -324,22 +322,27 @@ def parallel_advance_proof( fail_fast: bool = False, max_workers: int = 1, ) -> None: - """Advance proof with multithreaded strategy: `Prover.step_proof()` to a worker thread pool for each step - as available, and `Proof.commit()` results as they become available, and get new steps with - `Proof.get_steps()` and submit to thread pool. - - :param P: Type of proof to be advanced in parallel - :param PS: Proof step: data required to perform a step of the proof. - :param SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof` - - :param create_prover: Function which creates a new `Prover`. These provers must not reference any shared - data to be written during `parallel_advance_proof`, to avoid race conditions. - :param max_iterations: Maximum number of steps to take - :param fail_fast: If the proof is failing after finishing a step, halt execution even if there are - still available steps - :param max_workers: Maximum number of worker threads the pool can spawn + """Advance proof with multithreaded strategy. + + `Prover.step_proof()` to a worker thread pool for each step as available, + and `Proof.commit()` results as they become available, + and get new steps with `Proof.get_steps()` and submit to thread pool. + + Generic type variables: + + - P: Type of proof to be advanced in parallel. + - PS: Proof step: data required to perform a step of the proof. + - SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof`. + + Args: + proof: The proof to advance. + create_prover: Function which creates a new `Prover`. These provers must not reference any shared + data to be written during `parallel_advance_proof`, to avoid race conditions. + max_iterations: Maximum number of steps to take. + fail_fast: If the proof is failing after finishing a step, + halt execution even if there are still available steps. 
+ max_workers: Maximum number of worker threads the pool can spawn. """ - pending: set[Future[Any]] = set() explored: set[PS] = set() iterations = 0 @@ -385,14 +388,11 @@ def submit_steps(_steps: Iterable[PS]) -> None: class _ProverPool(ContextManager['_ProverPool'], Generic[P, PS, SR]): """Wrapper for `ThreadPoolExecutor` which spawns one `Prover` for each worker thread. - :param P: Type of proof to be advanced in parallel - :param PS: Proof step: data required to perform a step of the proof. - :param SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof` - - :param create_prover: Function which creates a new `Prover`. These provers must not reference any shared - data to be written during `parallel_advance_proof`, to avoid race conditions. - :param max_workers: Maximum number of worker threads the pool can spawn + Generic type variables: + - P: Type of proof to be advanced in parallel. + - PS: Proof step: data required to perform a step of the proof. + - SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof`. """ _create_prover: Callable[[], Prover[P, PS, SR]] @@ -406,6 +406,13 @@ def __init__( *, max_workers: int | None = None, ) -> None: + """Initialize an instance. + + Args: + create_prover: Function which creates a new `Prover`. These provers must not reference any shared + data to be written during `parallel_advance_proof`, to avoid race conditions. + max_workers (optional): Maximum number of worker threads the pool can spawn. + """ self._create_prover = create_prover self._provers = {} self._executor = ThreadPoolExecutor(max_workers) @@ -443,10 +450,13 @@ def step() -> Iterable[SR]: class Prover(ContextManager['Prover'], Generic[P, PS, SR]): - """Abstract class which advances `Proof`s with `init_proof()` and `step_proof()` - :param P: Type of proof this `Prover` operates on - :param PS: Proof step: data required to perform a step of the proof. - :param SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof` + """Abstract class which advances `Proof`s with `init_proof()` and `step_proof()`. + + Generic type variables: + + - P: Type of proof this `Prover` operates on. + - PS: Proof step: data required to perform a step of the proof. + - SR: Step result: data produced by executing a PS with `Prover.step_proof` used to update the `Proof`. """ def __enter__(self) -> Prover[P, PS, SR]: @@ -463,25 +473,32 @@ def failure_info(self, proof: P) -> FailureInfo: ... @abstractmethod def step_proof(self, step: PS) -> Iterable[SR]: - """Do the work associated with a `PS`, a proof step. Should not modify a `Proof` or `self`, but may read - from `self` as long as those fields are not being modified during `step_proof()`, `get_steps()`, and - `commit()`. + """Do the work associated with a `PS`, a proof step. + + Should not modify a `Proof` or `self`, but may read from `self` as long as + those fields are not being modified during `step_proof()`, `get_steps()`, and `commit()`. """ ... @abstractmethod def init_proof(self, proof: P) -> None: - """Perform any initialization steps needed at the beginning of proof execution. For example, for - `APRProver`, upload circularity and depends module of the proof to the KoreServer via `add_module`. + """Perform any initialization steps needed at the beginning of proof execution. + + For example, for `APRProver`, upload circularity and depends module of the proof + to the `KoreServer` via `add_module`. """ ... 
def advance_proof(self, proof: P, max_iterations: int | None = None, fail_fast: bool = False) -> None:
- """Advance proof by a simple loop of `Proof.get_steps()` -> `Prover.step_proof()` -> `Proof.commit()`
+ """Advance a proof.
+
+ Performs a simple loop of `Proof.get_steps()` -> `Prover.step_proof()` -> `Proof.commit()`.

- :param max_iterations: Maximum number of steps to take
- :param fail_fast: If the proof is failing after finishing a step, halt execution even if there are
- still available steps
+ Args:
+ proof: The proof to advance.
+ max_iterations (optional): Maximum number of steps to take.
+ fail_fast: If the proof is failing after finishing a step,
+ halt execution even if there are still available steps.
 """
 iterations = 0
 _LOGGER.info(f'Initializing proof: {proof.id}')
diff --git a/pyk/src/pyk/proof/reachability.py b/pyk/src/pyk/proof/reachability.py
index 73cb82d5a4f..a079190441e 100644
--- a/pyk/src/pyk/proof/reachability.py
+++ b/pyk/src/pyk/proof/reachability.py
@@ -76,9 +76,12 @@ class APRProofStep:

 class APRProof(Proof[APRProofStep, APRProofResult], KCFGExploration):
- """APRProof and APRProver implement all-path reachability logic,
+ """Represents an all-path reachability proof.
+
+ APRProof and APRProver implement all-path reachability logic,
 as introduced by A. Stefanescu and others in their paper 'All-Path Reachability Logic':
 https://doi.org/10.23638/LMCS-15(2:5)2019
+
 Note that the reachability logic formula `phi =>A psi` does *not* have the same meaning
 as CTL/CTL*'s `phi -> AF psi`, since reachability logic ignores infinite traces.
 This implementation extends the above with bounded model checking, allowing the user

From e90afe00498500698560357bbb397685820bec98 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?=
Date: Sat, 22 Jun 2024 00:01:49 +0200
Subject: [PATCH 02/10] Change the `pyk` API docs theme (#4451)

Changes the Sphinx theme from `alabaster` to `sphinx_rtd_theme`, i.e. _Read
the Docs_.

To check the generated docs under the new theme, run `make docs` in the `pyk`
directory, then open `docs/build/index.html` in a web browser.
--- pyk/docs/conf.py | 3 ++- pyk/docs/generate.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pyk/docs/conf.py b/pyk/docs/conf.py index 319f3161c36..0bb235d9721 100644 --- a/pyk/docs/conf.py +++ b/pyk/docs/conf.py @@ -19,6 +19,7 @@ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', + 'sphinx_rtd_theme', ] templates_path = ['_templates'] exclude_patterns = [] @@ -34,5 +35,5 @@ # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -html_theme = 'alabaster' +html_theme = 'sphinx_rtd_theme' html_static_path = ['_static'] diff --git a/pyk/docs/generate.sh b/pyk/docs/generate.sh index b8f8f0bccc0..16ecea79386 100755 --- a/pyk/docs/generate.sh +++ b/pyk/docs/generate.sh @@ -21,7 +21,7 @@ source $VENV_DIR/bin/activate pip install --upgrade pip pip install --editable $PYK_DIR -pip install sphinx==$SPHINX_VERSION +pip install sphinx==$SPHINX_VERSION sphinx_rtd_theme # Generate and install _kllvm PYTHON_LIB=$(find $VENV_DIR -name 'site-packages' -type d) From 78a82dc2f8beb340ed00b2d6ca03bf9ea093f562 Mon Sep 17 00:00:00 2001 From: rv-jenkins Date: Mon, 24 Jun 2024 07:30:42 -0600 Subject: [PATCH 03/10] Update dependency: deps/haskell-backend_release (#4456) Co-authored-by: devops --- deps/haskell-backend_release | 2 +- flake.lock | 8 ++++---- flake.nix | 2 +- haskell-backend/src/main/native/haskell-backend | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deps/haskell-backend_release b/deps/haskell-backend_release index 600c0076132..02dc6f66811 100644 --- a/deps/haskell-backend_release +++ b/deps/haskell-backend_release @@ -1 +1 @@ -v0.1.14 +v0.1.19 diff --git a/flake.lock b/flake.lock index 7df2d6a9a38..4059105ce23 100644 --- a/flake.lock +++ b/flake.lock @@ -63,16 +63,16 @@ "z3": "z3" }, "locked": { - "lastModified": 1718711620, - "narHash": "sha256-HipdmxMVwN0c7DXyc5VZgMFyZV+RC9+weGVroqByGeo=", + "lastModified": 1719218678, + "narHash": "sha256-7FVcMNEGulpwvzwrlPwqgd+qhArW3mE6TA/e6oSF0m4=", "owner": "runtimeverification", "repo": "haskell-backend", - "rev": "8102c7dd2ad5b69cc1c8daa87e6bc7227d9dbed5", + "rev": "a6f5646a160c277e6e252552eee86563309a9776", "type": "github" }, "original": { "owner": "runtimeverification", - "ref": "v0.1.14", + "ref": "v0.1.19", "repo": "haskell-backend", "type": "github" } diff --git a/flake.nix b/flake.nix index 79ac2932abe..7f6b7996349 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "K Framework"; inputs = { - haskell-backend.url = "github:runtimeverification/haskell-backend/v0.1.14"; + haskell-backend.url = "github:runtimeverification/haskell-backend/v0.1.19"; nixpkgs.follows = "llvm-backend/nixpkgs"; flake-utils.url = "github:numtide/flake-utils"; llvm-backend = { diff --git a/haskell-backend/src/main/native/haskell-backend b/haskell-backend/src/main/native/haskell-backend index 8102c7dd2ad..a6f5646a160 160000 --- a/haskell-backend/src/main/native/haskell-backend +++ b/haskell-backend/src/main/native/haskell-backend @@ -1 +1 @@ -Subproject commit 8102c7dd2ad5b69cc1c8daa87e6bc7227d9dbed5 +Subproject commit a6f5646a160c277e6e252552eee86563309a9776 From 1ee5abba141e781e05a17a896f3018141982736c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?= Date: Mon, 24 Jun 2024 17:52:29 +0200 Subject: [PATCH 04/10] Add workflow to run `actionlint` (#4468) [actionlint](https://github.com/rhysd/actionlint) is a static checker for GitHub Actions workflow files. 
This PR adds a workflow that runs `actionlint` to check `.yml` files in `.github/workflows`. It also cleans up some issues flagged by `shellcheck`, a linter for shell scripts `actionlint` calls out to. --------- Co-authored-by: Bruce Collie --- .github/actionlint.yaml | 5 ++ .github/workflows/develop.yml | 48 ++++++++--------- .github/workflows/master-pr.yml | 8 +-- .github/workflows/release.yml | 79 +++++++++++++++------------- .github/workflows/run-actionlint.yml | 16 ++++++ .github/workflows/test-pr.yml | 45 ++++++++-------- .github/workflows/update-deps.yml | 6 +-- 7 files changed, 118 insertions(+), 89 deletions(-) create mode 100644 .github/actionlint.yaml create mode 100644 .github/workflows/run-actionlint.yml diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 00000000000..cdd80bc93a8 --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,5 @@ +self-hosted-runner: + labels: + - MacM1 + - normal + - performance diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index 088b6676933..c3623290323 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -27,7 +27,7 @@ jobs: git checkout -B master origin/master old_develop="$(git merge-base origin/develop origin/master)" new_develop="$(git rev-parse origin/develop)" - if git diff --exit-code ${old_develop} ${new_develop} -- package/version; then + if git diff --exit-code "${old_develop}" "${new_develop}" -- package/version; then git merge --no-edit origin/develop ./package/version.sh bump else @@ -55,41 +55,41 @@ jobs: run: | set -euxo pipefail workspace=$(pwd) - docker run --name k-posting-profiling-tests-${GITHUB_SHA} \ - --rm -it --detach \ - -e BENCHER_API_TOKEN=$BENCHER_API_TOKEN \ - -e BENCHER_PROJECT=$BENCHER_PROJECT \ - -e BENCHER_ADAPTER=$BENCHER_ADAPTER \ - -e GITHUB_HEAD_REF=$GITHUB_HEAD_REF \ - -e GITHUB_BASE_REF=$GITHUB_BASE_REF \ - -e GITHUB_TOKEN=$GITHUB_TOKEN \ - -e GITHUB_ACTIONS=true \ - -e GITHUB_EVENT_NAME=$GITHUB_EVENT_NAME \ - -e GITHUB_EVENT_PATH=$GITHUB_EVENT_PATH \ - -e GITHUB_REPOSITORY=$GITHUB_REPOSITORY \ - -e GITHUB_REF=$GITHUB_REF \ - -e GITHUB_SHA=${GITHUB_SHA} \ - --workdir /opt/workspace \ - -v "${workspace}:/opt/workspace" \ - -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ - ${BASE_OS}:${BASE_DISTRO} + docker run --name "k-posting-profiling-tests-${GITHUB_SHA}" \ + --rm -it --detach \ + -e BENCHER_API_TOKEN="$BENCHER_API_TOKEN" \ + -e BENCHER_PROJECT="$BENCHER_PROJECT" \ + -e BENCHER_ADAPTER="$BENCHER_ADAPTER" \ + -e GITHUB_HEAD_REF="$GITHUB_HEAD_REF" \ + -e GITHUB_BASE_REF="$GITHUB_BASE_REF" \ + -e GITHUB_TOKEN="$GITHUB_TOKEN" \ + -e GITHUB_ACTIONS=true \ + -e GITHUB_EVENT_NAME="$GITHUB_EVENT_NAME" \ + -e GITHUB_EVENT_PATH="$GITHUB_EVENT_PATH" \ + -e GITHUB_REPOSITORY="$GITHUB_REPOSITORY" \ + -e GITHUB_REF="$GITHUB_REF" \ + -e GITHUB_SHA="$GITHUB_SHA" \ + --workdir /opt/workspace \ + -v "${workspace}:/opt/workspace" \ + -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ + "${BASE_OS}:${BASE_DISTRO}" - name: 'Setting up dependencies' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh SKIP_K_BUILD' + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh SKIP_K_BUILD' - name: 'Getting Performance Tests Results' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c 
'./k-distribution/tests/profiling/post_results_to_develop.py ${GITHUB_SHA}' + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" /bin/bash -c './k-distribution/tests/profiling/post_results_to_develop.py "${GITHUB_SHA}"' - name: 'Posting Performance Tests Results' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c 'bencher run \ + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" /bin/bash -c 'bencher run \ --if-branch "develop" --else-if-branch "master" \ --file .profiling-results.json --err --ci-only-on-alert \ --github-actions "${GITHUB_TOKEN}" "echo Exporting report"' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-posting-profiling-tests-${GITHUB_SHA} - docker container rm --force k-posting-profiling-tests-${GITHUB_SHA} || true + docker stop --time=0 "k-posting-profiling-tests-${GITHUB_SHA}" + docker container rm --force "k-posting-profiling-tests-${GITHUB_SHA}" || true diff --git a/.github/workflows/master-pr.yml b/.github/workflows/master-pr.yml index e96dfb1d31a..2dda01b9e34 100644 --- a/.github/workflows/master-pr.yml +++ b/.github/workflows/master-pr.yml @@ -25,8 +25,8 @@ jobs: run: | set -x pull_number=$(jq --raw-output .pull_request.number "${GITHUB_EVENT_PATH}") - curl -X PATCH \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${GITHUB_TOKEN}" \ - https://api.github.com/repos/runtimeverification/k/pulls/${pull_number} \ + curl -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${GITHUB_TOKEN}" \ + "https://api.github.com/repos/runtimeverification/k/pulls/${pull_number}" \ -d '{"base":"develop"}' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f45bfe88ef3..2c1fd886c47 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: 'Get release_id' - run: echo "release_id=$(jq --raw-output '.release.id' $GITHUB_EVENT_PATH)" >> ${GITHUB_OUTPUT} + run: echo "release_id=$(jq --raw-output '.release.id' "$GITHUB_EVENT_PATH")" >> "${GITHUB_OUTPUT}" id: release outputs: release_id: ${{ steps.release.outputs.release_id }} @@ -35,13 +35,14 @@ jobs: set -x version=$(cat package/version) tarball=kframework-${version}-src.tar.gz + # shellcheck disable=SC2038 find . -name .git | xargs rm -r CURDIR=$(pwd) cd .. 
- tar czvf ${tarball} $(basename ${CURDIR}) - mv ${tarball} ${CURDIR}/ - cd ${CURDIR} - gh release upload --repo runtimeverification/k --clobber v${version} ${tarball} + tar czvf "${tarball}" "$(basename "${CURDIR}")" + mv "${tarball}" "${CURDIR}/" + cd "${CURDIR}" + gh release upload --repo runtimeverification/k --clobber "v${version}" "${tarball}" cachix-release: name: 'k-framework-binary cachix release' @@ -114,8 +115,8 @@ jobs: run: | set -x version=$(cat package/version) - cp kframework_amd64_ubuntu_jammy.deb kframework_${version}_amd64_ubuntu_jammy.deb - gh release upload --repo runtimeverification/k --clobber v${version} kframework_${version}_amd64_ubuntu_jammy.deb + cp kframework_amd64_ubuntu_jammy.deb "kframework_${version}_amd64_ubuntu_jammy.deb" + gh release upload --repo runtimeverification/k --clobber "v${version}" "kframework_${version}_amd64_ubuntu_jammy.deb" - name: 'Build, Test, and Push Dockerhub Image' shell: bash {0} env: @@ -125,19 +126,19 @@ jobs: set -euxo pipefail version=$(cat package/version) version_tag=ubuntu-jammy-${version} - docker login --username rvdockerhub --password ${DOCKERHUB_PASSWORD} - docker image build . --file package/docker/Dockerfile.ubuntu-jammy --tag ${DOCKERHUB_REPO}:${version_tag} - docker run --name k-package-docker-test-jammy-${GITHUB_SHA} --rm -it --detach ${DOCKERHUB_REPO}:${version_tag} - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && echo "module TEST imports BOOL endmodule" > test.k' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && kompile test.k --backend llvm' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && kompile test.k --backend haskell' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && pyk kompile test.k --backend llvm' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && pyk kompile test.k --backend haskell' - docker image push ${DOCKERHUB_REPO}:${version_tag} + docker login --username rvdockerhub --password "${DOCKERHUB_PASSWORD}" + docker image build . 
--file package/docker/Dockerfile.ubuntu-jammy --tag "${DOCKERHUB_REPO}:${version_tag}" + docker run --name "k-package-docker-test-jammy-${GITHUB_SHA}" --rm -it --detach "${DOCKERHUB_REPO}:${version_tag}" + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && echo "module TEST imports BOOL endmodule" > test.k' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && kompile test.k --backend llvm' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && kompile test.k --backend haskell' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && pyk kompile test.k --backend llvm' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && pyk kompile test.k --backend haskell' + docker image push "${DOCKERHUB_REPO}:${version_tag}" - name: 'Clean up Docker Container' if: always() run: | - docker stop --time=0 k-package-docker-test-jammy-${GITHUB_SHA} + docker stop --time=0 "k-package-docker-test-jammy-${GITHUB_SHA}" - name: On Failure, Upload the kore-exec.tar.gz file to the Summary Page if: failure() uses: actions/upload-artifact@v4 @@ -193,7 +194,9 @@ jobs: # https://github.com/actions/runner-images/issues/6459 # https://github.com/actions/runner-images/issues/6507 # https://github.com/actions/runner-images/issues/2322 - brew list -1 | grep python | while read formula; do brew unlink $formula; brew link --overwrite $formula; done + + # shellcheck disable=SC2162 + brew list -1 | grep python | while read formula; do brew unlink "$formula"; brew link --overwrite "$formula"; done - name: Build brew bottle id: build @@ -209,15 +212,16 @@ jobs: ROOT_URL='https://github.com/runtimeverification/k/releases/download' wget "$ROOT_URL/v${VERSION}/kframework-${VERSION}-src.tar.gz" cd homebrew-k - ../kframework/package/macos/brew-update-to-local ${PACKAGE} ${VERSION} - git commit Formula/$PACKAGE.rb -m "Update ${PACKAGE} to ${VERSION}: part 1" - ../kframework/package/macos/brew-build-and-update-to-local-bottle ${PACKAGE} ${VERSION} ${ROOT_URL} + ../kframework/package/macos/brew-update-to-local "${PACKAGE}" "${VERSION}" + git commit "Formula/$PACKAGE.rb" -m "Update ${PACKAGE} to ${VERSION}: part 1" + ../kframework/package/macos/brew-build-and-update-to-local-bottle "${PACKAGE}" "${VERSION}" "${ROOT_URL}" git reset HEAD^ - LOCAL_BOTTLE_NAME=$(basename $(find . -name "kframework--${VERSION}.arm64_sonoma.bottle*.tar.gz")) - BOTTLE_NAME=$(echo ${LOCAL_BOTTLE_NAME#./} | sed 's!kframework--!kframework-!') - ../kframework/package/macos/brew-update-to-final ${PACKAGE} ${VERSION} ${ROOT_URL} - echo "path=${LOCAL_BOTTLE_NAME}" >> ${GITHUB_OUTPUT} - echo "path_remote=${BOTTLE_NAME}" >> ${GITHUB_OUTPUT} + LOCAL_BOTTLE_NAME=$(basename "$(find . 
-name "kframework--${VERSION}.arm64_sonoma.bottle*.tar.gz")") + # shellcheck disable=2001 + BOTTLE_NAME=$(echo "${LOCAL_BOTTLE_NAME#./}" | sed 's!kframework--!kframework-!') + ../kframework/package/macos/brew-update-to-final "${PACKAGE}" "${VERSION}" "${ROOT_URL}" + echo "path=${LOCAL_BOTTLE_NAME}" >> "${GITHUB_OUTPUT}" + echo "path_remote=${BOTTLE_NAME}" >> "${GITHUB_OUTPUT}" - name: Upload bottle uses: actions/upload-artifact@v4 @@ -279,7 +283,9 @@ jobs: # https://github.com/actions/runner-images/issues/6459 # https://github.com/actions/runner-images/issues/6507 # https://github.com/actions/runner-images/issues/2322 - brew list -1 | grep python | while read formula; do brew unlink $formula; brew link --overwrite $formula; done + + # shellcheck disable=SC2162 + brew list -1 | grep python | while read formula; do brew unlink "$formula"; brew link --overwrite "$formula"; done - name: 'Test brew bottle' id: test @@ -324,8 +330,8 @@ jobs: run: | set -x version=$(cat k-homebrew-checkout/package/version) - mv homebrew-k-old/${BOTTLE_NAME} homebrew-k-old/${REMOTE_BOTTLE_NAME} - gh release upload --repo runtimeverification/k --clobber v${version} homebrew-k-old/${REMOTE_BOTTLE_NAME} + mv "homebrew-k-old/${BOTTLE_NAME}" "homebrew-k-old/${REMOTE_BOTTLE_NAME}" + gh release upload --repo runtimeverification/k --clobber "v${version}" "homebrew-k-old/${REMOTE_BOTTLE_NAME}" - run: | git config --global user.name rv-jenkins @@ -385,16 +391,16 @@ jobs: MAVEN_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} run: | cat ~/.m2/settings.xml - docker exec -t k-release-ci-${GITHUB_SHA} bash -c 'mkdir -p /home/github-runner/.m2' - docker cp ~/.m2/settings.xml k-release-ci-${GITHUB_SHA}:/tmp/settings.xml - docker exec -t k-release-ci-${GITHUB_SHA} bash -c 'mv /tmp/settings.xml /home/github-runner/.m2/settings.xml' - docker exec -e MAVEN_USERNAME -e MAVEN_PASSWORD -t k-release-ci-${GITHUB_SHA} bash -c "mvn --batch-mode deploy" + docker exec -t "k-release-ci-${GITHUB_SHA}" bash -c 'mkdir -p /home/github-runner/.m2' + docker cp ~/.m2/settings.xml "k-release-ci-${GITHUB_SHA}:/tmp/settings.xml" + docker exec -t "k-release-ci-${GITHUB_SHA} bash" -c 'mv /tmp/settings.xml /home/github-runner/.m2/settings.xml' + docker exec -e MAVEN_USERNAME -e MAVEN_PASSWORD -t "k-release-ci-${GITHUB_SHA}" bash -c "mvn --batch-mode deploy" - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-release-ci-${GITHUB_SHA} - docker container rm --force k-release-ci-${GITHUB_SHA} || true + docker stop --time=0 "k-release-ci-${GITHUB_SHA}" + docker container rm --force "k-release-ci-${GITHUB_SHA}" || true - name: Publish release uses: actions/github-script@v7 @@ -455,7 +461,7 @@ jobs: -H "Authorization: Bearer ${GITHUB_TOKEN}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/runtimeverification/devops/dispatches \ - -d '{"event_type":"on-demand-test","client_payload":{"repo":"runtimeverification/k","version":"'${VERSION}'"}}' + -d '{"event_type":"on-demand-test","client_payload":{"repo":"runtimeverification/k","version":"'"${VERSION}"'"}}' pyk-build-docs: name: 'Build Pyk Documentation' @@ -532,6 +538,7 @@ jobs: npm run build-sitemap cd - mv web/public_content ./ + # shellcheck disable=SC2046 rm -rf $(find . -maxdepth 1 -not -name public_content -a -not -name .git -a -not -path . -a -not -path .. 
-a -not -name CNAME) mv public_content/* ./ rm -rf public_content diff --git a/.github/workflows/run-actionlint.yml b/.github/workflows/run-actionlint.yml new file mode 100644 index 00000000000..2d7cd5f6a09 --- /dev/null +++ b/.github/workflows/run-actionlint.yml @@ -0,0 +1,16 @@ +name: Lint GitHub Actions workflows +on: pull_request + +jobs: + actionlint: + name: Run actionlint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Download actionlint + id: get_actionlint + run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) + shell: bash + - name: Check workflow files + run: ${{ steps.get_actionlint.outputs.executable }} -color + shell: bash diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index 4d43a311ed7..77a19e55e99 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -81,12 +81,12 @@ jobs: distro: jammy llvm: 15 - name: 'Build and Test K' - run: docker exec -t k-ci-${GITHUB_SHA} /bin/bash -c 'mvn verify -Dspotless.check.skip=true --batch-mode -U' + run: docker exec -t "k-ci-${GITHUB_SHA}" /bin/bash -c 'mvn verify -Dspotless.check.skip=true --batch-mode -U' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-ci-${GITHUB_SHA} - docker container rm --force k-ci-${GITHUB_SHA} || true + docker stop --time=0 "k-ci-${GITHUB_SHA}" + docker container rm --force "k-ci-${GITHUB_SHA}" || true test-package-ubuntu-jammy: @@ -222,10 +222,11 @@ jobs: GC_DONT_GC: '1' run: | nix --version - export JQ=$(nix-build '' -A jq --no-link)/bin/jq + JQ=$(nix-build '' -A jq --no-link)/bin/jq + export JQ k=$(nix build . --print-build-logs --json | $JQ -r '.[].outputs | to_entries[].value') - drv=$(nix-store --query --deriver ${k}) - nix-store --query --requisites ${drv} | cachix push k-framework + drv=$(nix-store --query --deriver "$k") + nix-store --query --requisites "$drv" | cachix push k-framework - name: 'Smoke test K' run: GC_DONT_GC=1 nix build --print-build-logs .#smoke-test # These tests take a really long time to run on other platforms, so we @@ -366,33 +367,33 @@ jobs: run: | set -euxo pipefail workspace=$(pwd) - docker run --name k-profiling-tests-${GITHUB_SHA} \ + docker run --name "k-profiling-tests-${GITHUB_SHA}" \ --rm -it --detach --user root \ - -e BENCHER_API_TOKEN=$BENCHER_API_TOKEN \ - -e BENCHER_PROJECT=$BENCHER_PROJECT \ - -e BENCHER_ADAPTER=$BENCHER_ADAPTER \ - -e GITHUB_HEAD_REF=$GITHUB_HEAD_REF \ - -e GITHUB_BASE_REF=$GITHUB_BASE_REF \ - -e GITHUB_TOKEN=$GITHUB_TOKEN \ + -e BENCHER_API_TOKEN="$BENCHER_API_TOKEN" \ + -e BENCHER_PROJECT="$BENCHER_PROJECT" \ + -e BENCHER_ADAPTER="$BENCHER_ADAPTER" \ + -e GITHUB_HEAD_REF="$GITHUB_HEAD_REF" \ + -e GITHUB_BASE_REF="$GITHUB_BASE_REF" \ + -e GITHUB_TOKEN="$GITHUB_TOKEN" \ -e GITHUB_ACTIONS=true \ - -e GITHUB_EVENT_NAME=$GITHUB_EVENT_NAME \ - -e GITHUB_EVENT_PATH=$GITHUB_EVENT_PATH \ - -e GITHUB_REPOSITORY=$GITHUB_REPOSITORY \ - -e GITHUB_REF=$GITHUB_REF \ + -e GITHUB_EVENT_NAME="$GITHUB_EVENT_NAME" \ + -e GITHUB_EVENT_PATH="$GITHUB_EVENT_PATH" \ + -e GITHUB_REPOSITORY="$GITHUB_REPOSITORY" \ + -e GITHUB_REF="$GITHUB_REF" \ --workdir /opt/workspace \ -v "${workspace}:/opt/workspace" \ -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ - runtimeverificationinc/k-profiling-tests-${GITHUB_SHA} + "runtimeverificationinc/k-profiling-tests-${GITHUB_SHA}" - name: 'Install K from Package' run: | set -euxo pipefail - docker exec -t k-profiling-tests-${GITHUB_SHA} /bin/bash -c 
'./k-distribution/tests/profiling/setup_profiling.sh' + docker exec -t "k-profiling-tests-${GITHUB_SHA}" /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh' - name: 'Profiling Performance Tests' run: | set -euxo pipefail - docker exec -t k-profiling-tests-${GITHUB_SHA} /bin/bash -c 'cd k-distribution/tests/profiling && make' + docker exec -t "k-profiling-tests-${GITHUB_SHA}" /bin/bash -c 'cd k-distribution/tests/profiling && make' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-profiling-tests-${GITHUB_SHA} - docker container rm --force k-profiling-tests-${GITHUB_SHA} || true + docker stop --time=0 "k-profiling-tests-${GITHUB_SHA}" + docker container rm --force "k-profiling-tests-${GITHUB_SHA}" || true diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml index 6edafbfacc8..9a66903a64b 100644 --- a/.github/workflows/update-deps.yml +++ b/.github/workflows/update-deps.yml @@ -48,16 +48,16 @@ jobs: hs_backend_version=$(cat deps/haskell-backend_release) cd haskell-backend/src/main/native/haskell-backend - git checkout ${hs_backend_version} + git checkout "${hs_backend_version}" cd - - sed -i 's! haskell-backend.url = "github:runtimeverification/haskell-backend/.*";! haskell-backend.url = "github:runtimeverification/haskell-backend/'${hs_backend_version}'";!' flake.nix + sed -i 's! haskell-backend.url = "github:runtimeverification/haskell-backend/.*";! haskell-backend.url = "github:runtimeverification/haskell-backend/'"${hs_backend_version}"'";!' flake.nix if git add flake.nix haskell-backend/src/main/native/haskell-backend && git commit -m "flake.nix, haskell-backend/src/main/native/haskell-backend: update to version ${hs_backend_version}"; then changed=true fi llvm_backend_version="v$(cat deps/llvm-backend_release)" cd llvm-backend/src/main/native/llvm-backend - git checkout ${llvm_backend_version} + git checkout "${llvm_backend_version}" cd - sed -i 's! url = "github:runtimeverification/llvm-backend/.*";! url = "github:runtimeverification/llvm-backend/'"${llvm_backend_version}"'";!' 
flake.nix if git add flake.nix llvm-backend/src/main/native/llvm-backend && git commit -m "flake.nix, llvm-backend/src/main/native/llvm-backend: update to version ${llvm_backend_version}"; then From 44d0dbc2866b25bb80aa074eb7ad81cf95be96e5 Mon Sep 17 00:00:00 2001 From: rv-jenkins Date: Mon, 24 Jun 2024 11:25:01 -0600 Subject: [PATCH 05/10] Update dependency: deps/llvm-backend_release (#4465) Co-authored-by: devops Co-authored-by: Bruce Collie --- deps/llvm-backend_release | 2 +- flake.lock | 8 ++++---- flake.nix | 2 +- llvm-backend/src/main/native/llvm-backend | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deps/llvm-backend_release b/deps/llvm-backend_release index 0665a48efd5..c892edde544 100644 --- a/deps/llvm-backend_release +++ b/deps/llvm-backend_release @@ -1 +1 @@ -0.1.50 +0.1.51 diff --git a/flake.lock b/flake.lock index 4059105ce23..582046fbfd3 100644 --- a/flake.lock +++ b/flake.lock @@ -112,16 +112,16 @@ ] }, "locked": { - "lastModified": 1718737220, - "narHash": "sha256-0yMDvOr3TQPqpKBKOXAqfrz6FO+rOJ0qG3uNotxXhao=", + "lastModified": 1718910118, + "narHash": "sha256-E2utS5SGK3B7IjHRBlyKSB6TR/gLli14m+7d8AAmp0I=", "owner": "runtimeverification", "repo": "llvm-backend", - "rev": "a7e321383372ba625173d4064836ab4f8cebef85", + "rev": "6671fe4b8de164ac2d141dcce869a06b628ed560", "type": "github" }, "original": { "owner": "runtimeverification", - "ref": "v0.1.50", + "ref": "v0.1.51", "repo": "llvm-backend", "type": "github" } diff --git a/flake.nix b/flake.nix index 7f6b7996349..f346567f8dd 100644 --- a/flake.nix +++ b/flake.nix @@ -5,7 +5,7 @@ nixpkgs.follows = "llvm-backend/nixpkgs"; flake-utils.url = "github:numtide/flake-utils"; llvm-backend = { - url = "github:runtimeverification/llvm-backend/v0.1.50"; + url = "github:runtimeverification/llvm-backend/v0.1.51"; inputs.utils.follows = "flake-utils"; }; rv-utils.url = "github:runtimeverification/rv-nix-tools"; diff --git a/llvm-backend/src/main/native/llvm-backend b/llvm-backend/src/main/native/llvm-backend index a7e32138337..6671fe4b8de 160000 --- a/llvm-backend/src/main/native/llvm-backend +++ b/llvm-backend/src/main/native/llvm-backend @@ -1 +1 @@ -Subproject commit a7e321383372ba625173d4064836ab4f8cebef85 +Subproject commit 6671fe4b8de164ac2d141dcce869a06b628ed560 From c14fbb4d399930d5084b4cfc77084c80455fe1a1 Mon Sep 17 00:00:00 2001 From: gtrepta <50716988+gtrepta@users.noreply.github.com> Date: Mon, 24 Jun 2024 14:02:05 -0500 Subject: [PATCH 06/10] Begin implementing fuzzing (#4454) This introduces the most basic functionality for doing fuzz testing of a property in K. It adds a function `fuzz` to the `ktool.kfuzz` pyk module which takes the location of an llvm-kompiled definition, a template configuration in kore containing input variables to be randomized, a strategy for substituting those variables (so far, only with integers using `kintegers`), and either a function to check the resulting kore or a flag to check the exit code of the interpreter. It will invoke hypothesis to randomize the inputs and run the interpreter. 
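A usage sketch follows (illustrative only, not part of the patch: the kompiled directory `./test-kompiled`, the `template.kore` file, and the `VarX`/`VarY` variables and their sorts are assumptions for the example):

```python
from pathlib import Path

from pyk.kast.inner import KSort
from pyk.kore.parser import KoreParser
from pyk.kore.syntax import EVar, SortApp
from pyk.ktool.kfuzz import fuzz, kintegers

# Parse a template configuration that contains free variables to randomize.
template = KoreParser(Path('template.kore').read_text()).pattern()

# Map each template variable to a hypothesis strategy; here, K integers
# injected into the (assumed) sort of the variables in the template.
subst = {
    EVar('VarX', SortApp('SortKItem')): kintegers(with_inj=KSort('KItem')),
    EVar('VarY', SortApp('SortKItem')): kintegers(with_inj=KSort('KItem')),
}

# Run 25 randomized cases; a non-zero interpreter exit code fails the test.
fuzz('./test-kompiled', template, subst, check_exit_code=True, max_examples=25)
```

Alternatively, pass `check_func` to inspect the resulting KORE pattern instead of relying on the exit code.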
--- pyk/poetry.lock | 46 ++++++- pyk/pyproject.toml | 1 + pyk/src/pyk/ktool/kfuzz.py | 111 ++++++++++++++++ pyk/src/pyk/ktool/krun.py | 31 +++-- pyk/src/tests/integration/ktool/test_fuzz.py | 125 +++++++++++++++++++ 5 files changed, 305 insertions(+), 9 deletions(-) create mode 100644 pyk/src/pyk/ktool/kfuzz.py create mode 100644 pyk/src/tests/integration/ktool/test_fuzz.py diff --git a/pyk/poetry.lock b/pyk/poetry.lock index d181bc4df4a..9de7d3ec3b6 100644 --- a/pyk/poetry.lock +++ b/pyk/poetry.lock @@ -373,6 +373,39 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "hypothesis" +version = "6.103.1" +description = "A library for property-based testing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hypothesis-6.103.1-py3-none-any.whl", hash = "sha256:d3c959fab6233e78867499e2117ae9db8dc40eeed936d71a2cfc7b6094972e74"}, + {file = "hypothesis-6.103.1.tar.gz", hash = "sha256:d299d5c21d6408eab3be670c94c974f3acf0b511c61fe81804b09091e393ee1f"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +sortedcontainers = ">=2.1.0,<3.0.0" + +[package.extras] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.54)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] +cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] +codemods = ["libcst (>=0.3.16)"] +crosshair = ["crosshair-tool (>=0.0.54)", "hypothesis-crosshair (>=0.0.4)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=3.2)"] +dpcontracts = ["dpcontracts (>=0.4)"] +ghostwriter = ["black (>=19.10b0)"] +lark = ["lark (>=0.10.1)"] +numpy = ["numpy (>=1.17.3)"] +pandas = ["pandas (>=1.1)"] +pytest = ["pytest (>=4.6)"] +pytz = ["pytz (>=2014.1)"] +redis = ["redis (>=3.0.0)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2024.1)"] + [[package]] name = "importlib-metadata" version = "7.1.0" @@ -892,6 +925,17 @@ files = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + [[package]] name = "textual" version = "0.27.0" @@ -1010,4 +1054,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1e8db8aceb9488a870758157f280a7cbff78677f44d6bedfa585f6bebdd5ffc5" +content-hash = "7888201fe916d5896a73a6bedf4367e437bd8807dad0ef19175db10165d07404" diff --git a/pyk/pyproject.toml b/pyk/pyproject.toml index 5497ac8620f..3be1e5f6207 100644 --- a/pyk/pyproject.toml +++ b/pyk/pyproject.toml @@ -21,6 +21,7 @@ cmd2 = "^2.4.2" coloredlogs = "^15.0.1" filelock = "^3.9.0" graphviz = "^0.20.1" +hypothesis = "^6.103.1" psutil = "5.9.5" pybind11 = "^2.10.3" 
pytest = "*" diff --git a/pyk/src/pyk/ktool/kfuzz.py b/pyk/src/pyk/ktool/kfuzz.py new file mode 100644 index 00000000000..b2d02971564 --- /dev/null +++ b/pyk/src/pyk/ktool/kfuzz.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from hypothesis import Phase, Verbosity, given, settings +from hypothesis.strategies import builds, fixed_dictionaries, integers + +from ..kast.inner import KSort +from ..konvert import _kast_to_kore +from ..kore.parser import KoreParser +from ..kore.syntax import Assoc, EVar +from ..prelude.k import inj +from ..prelude.kint import intToken +from .krun import llvm_interpret_raw + +if TYPE_CHECKING: + from collections.abc import Callable, Mapping + from pathlib import Path + from typing import Any + + from hypothesis.strategies import SearchStrategy + + from ..kast.inner import KInner + from ..kore.syntax import Pattern + + +def kintegers( + *, + min_value: int | None = None, + max_value: int | None = None, + with_inj: KSort | None = None, +) -> SearchStrategy[Pattern]: + """Return a search strategy for K integers. + + Args: + min_value: Minimum value for the generated integers + max_value: Maximum value for the generated integers + with_inj: Return the integer as an injection into this sort + + Returns: + A strategy which generates integer domain values. + """ + + def int_dv(value: int) -> Pattern: + res: KInner = intToken(value) + if with_inj is not None: + res = inj(KSort('Int'), with_inj, res) + return _kast_to_kore(res) + + return builds(int_dv, integers(min_value=min_value, max_value=max_value)) + + +def fuzz( + definition_dir: str | Path, + template: Pattern, + subst_strategy: dict[EVar, SearchStrategy[Pattern]], + check_func: Callable[[Pattern], Any] | None = None, + check_exit_code: bool = False, + max_examples: int = 50, +) -> None: + """Fuzz a property test with concrete execution over a K term. + + Args: + definition_dir: The location of the K definition to run the interpreter for. + template: The term which will be sent to the interpreter after randomizing inputs. It should contain at least one variable which will be substituted for a value. + subst_strategy: Should have each variable in the template term mapped to a strategy for generating values for it. + check_func: Will be called on the kore output from the interpreter. + Should throw an AssertionError if it determines that the output indicates a test failure. + A RuntimeError will be thrown if this is passed as an argument and check_exit_code is True. + check_exit_code: Check the exit code of the interpreter for a test failure instead of using check_func. + An exit code of 0 indicates a passing test. + A RuntimeError will be thrown if this is True and check_func is also passed as an argument. + max_examples: The number of test cases to run. + + Raises: + RuntimeError: If check_func exists and check_exit_code is set, or check_func doesn't exist and check_exit_code is cleared. 
+ """ + if bool(check_func) == check_exit_code: + raise RuntimeError('Must pass one of check_func or check_exit_code, and not both!') + + def test(subst_case: Mapping[EVar, Pattern]) -> None: + def sub(p: Pattern) -> Pattern: + if isinstance(p, Assoc): + symbol = p.symbol() + args = (arg.top_down(sub) for arg in p.app.args) + return p.of(symbol, patterns=(p.app.let(args=args),)) + if p in subst_case: + assert isinstance(p, EVar) + return subst_case[p] + return p + + test_pattern = template.top_down(sub) + res = llvm_interpret_raw(definition_dir, test_pattern.text) + + if check_exit_code: + assert res.returncode == 0 + else: + assert check_func + res_pattern = KoreParser(res.stdout).pattern() + check_func(res_pattern) + + strat: SearchStrategy = fixed_dictionaries(subst_strategy) + + given(strat)( + settings( + deadline=50000, + max_examples=max_examples, + verbosity=Verbosity.verbose, + phases=(Phase.generate, Phase.target, Phase.shrink), + )(test) + )() diff --git a/pyk/src/pyk/ktool/krun.py b/pyk/src/pyk/ktool/krun.py index 43df462837a..f18717feead 100644 --- a/pyk/src/pyk/ktool/krun.py +++ b/pyk/src/pyk/ktool/krun.py @@ -315,18 +315,33 @@ def llvm_interpret(definition_dir: str | Path, pattern: Pattern, *, depth: int | Raises: RuntimeError: If the interpreter fails. """ - definition_dir = Path(definition_dir) - check_dir_path(definition_dir) + try: + res = llvm_interpret_raw(definition_dir, pattern.text, depth) + except CalledProcessError as err: + raise RuntimeError(f'Interpreter failed with status {err.returncode}: {err.stderr}') from err + + return KoreParser(res.stdout).pattern() + + +def llvm_interpret_raw(definition_dir: str | Path, kore: str, depth: int | None = None) -> CompletedProcess: + """Execute the `interpreter` binary generated by the LLVM Backend, with no processing of input/output. + + Args: + definition_dir: Path to the kompiled definition directory. + pattern: KORE string to start rewriting from. + depth: Maximal number of rewrite steps to take. + + Returns: + The CompletedProcess of the interpreter. + Raises: + CalledProcessError: If the interpreter fails. 
+ """ + definition_dir = Path(definition_dir) interpreter_file = definition_dir / 'interpreter' check_file_path(interpreter_file) depth = depth if depth is not None else -1 args = [str(interpreter_file), '/dev/stdin', str(depth), '/dev/stdout'] - try: - res = run_process(args, input=pattern.text, pipe_stderr=True) - except CalledProcessError as err: - raise RuntimeError(f'Interpreter failed with status {err.returncode}: {err.stderr}') from err - - return KoreParser(res.stdout).pattern() + return run_process(args, input=kore, pipe_stderr=True) diff --git a/pyk/src/tests/integration/ktool/test_fuzz.py b/pyk/src/tests/integration/ktool/test_fuzz.py new file mode 100644 index 00000000000..76d34f9d6af --- /dev/null +++ b/pyk/src/tests/integration/ktool/test_fuzz.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +import pytest + +from pyk.kast.inner import KSort +from pyk.kore.parser import KoreParser +from pyk.kore.prelude import inj, top_cell_initializer +from pyk.kore.syntax import DV, App, Assoc, EVar, SortApp, String +from pyk.ktool.kfuzz import fuzz, kintegers +from pyk.ktool.kprint import _kast +from pyk.testing import KompiledTest + +from ..utils import K_FILES, TEST_DATA_DIR + +if TYPE_CHECKING: + from pathlib import Path + from typing import Final + + from pyk.kore.syntax import Pattern + +_LOGGER: Final = logging.getLogger(__name__) + +FUZZ_FILES: Path = TEST_DATA_DIR / 'fuzzing' + +VAR_X = EVar(name='VarX', sort=SortApp('SortInt')) +VAR_Y = EVar(name='VarY', sort=SortApp('SortInt')) + + +class TestImpFuzz(KompiledTest): + KOMPILE_MAIN_FILE = K_FILES / 'imp.k' + KOMPILE_BACKEND = 'llvm' + SUBSTS = {VAR_X: kintegers(with_inj=KSort('AExp')), VAR_Y: kintegers(with_inj=KSort('AExp'))} + + @staticmethod + def check(p: Pattern) -> None: + def check_inner(p: Pattern) -> Pattern: + match p: + case Assoc(): + symbol = p.symbol() + args = (arg.top_down(check_inner) for arg in p.app.args) + return p.of(symbol, patterns=(p.app.let(args=args),)) + case App("Lbl'UndsPipe'-'-GT-Unds'", args=(key, val)): + match key, val: + case ( + App('inj', args=(DV(value=String('res')),)), + App('inj', args=(DV(value=String(resval)),)), + ): + assert resval == '0' + + return p + + p.top_down(check_inner) + + @staticmethod + def setup_program(definition_dir: Path, text: str) -> Pattern: + kore_text = _kast(definition_dir=definition_dir, input='program', output='kore', expression=text).stdout + + program_pattern = KoreParser(kore_text).pattern() + + def replace_var_ids(p: Pattern) -> Pattern: + match p: + case App('inj', _, (DV(_, String('varx')),)): + return VAR_X + case App('inj', _, (DV(_, String('vary')),)): + return VAR_Y + return p + + program_pattern = program_pattern.top_down(replace_var_ids) + init_pattern = top_cell_initializer( + { + '$PGM': inj(SortApp('SortPgm'), SortApp('SortKItem'), program_pattern), + } + ) + + return init_pattern + + def test_fuzz( + self, + definition_dir: Path, + ) -> None: + # Given + program_text = """ + // Checks the commutativity of addition + int x, y, a, b, res; + x = varx; + y = vary; + a = x + y; + b = y + x; + if ((a <= b) && (b <= a)) { // a == b + res = 0; + } else { + res = 1; + } + """ + + init_pattern = self.setup_program(definition_dir, program_text) + + # Then + fuzz(definition_dir, init_pattern, self.SUBSTS, self.check) + + def test_fuzz_fail( + self, + definition_dir: Path, + ) -> None: + # Given + program_text = """ + // Checks that x <= y + int x, y, res; + x = varx; + y = vary; + if (x <= y) { + res 
= 0; + } else { + res = 1; + } + """ + + init_pattern = self.setup_program(definition_dir, program_text) + + # Then + with pytest.raises(AssertionError): + fuzz(definition_dir, init_pattern, self.SUBSTS, self.check) From 67eff5d505fb755f0212bc47629b2a1c8ef00b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?= Date: Mon, 24 Jun 2024 22:02:41 +0200 Subject: [PATCH 07/10] Refactor `from_spec_modules` in `APRProof` (#4447) - Move `ProveRpc`, `ClaimIndex` into separate modules - Add parameter `ordered` to `ClaimIndex.labels` to enable topological sorting of the result - Implement `APRProof.from_spec_modules` using `ClaimIndex` --- pyk/src/pyk/__main__.py | 3 +- pyk/src/pyk/ktool/claim_index.py | 188 ++++++++++++++++ pyk/src/pyk/ktool/kprove.py | 238 +------------------- pyk/src/pyk/ktool/prove_rpc.py | 100 ++++++++ pyk/src/pyk/proof/reachability.py | 87 ++----- pyk/src/tests/integration/ktool/test_imp.py | 2 +- 6 files changed, 319 insertions(+), 299 deletions(-) create mode 100644 pyk/src/pyk/ktool/claim_index.py create mode 100644 pyk/src/pyk/ktool/prove_rpc.py diff --git a/pyk/src/pyk/__main__.py b/pyk/src/pyk/__main__.py index a7b8d6143ea..8664ccbeee6 100644 --- a/pyk/src/pyk/__main__.py +++ b/pyk/src/pyk/__main__.py @@ -36,8 +36,9 @@ from .kore.syntax import Pattern, kore_term from .ktool.kompile import Kompile, KompileBackend from .ktool.kprint import KPrint -from .ktool.kprove import KProve, ProveRpc +from .ktool.kprove import KProve from .ktool.krun import KRun +from .ktool.prove_rpc import ProveRpc from .prelude.k import GENERATED_TOP_CELL from .prelude.ml import is_top, mlAnd, mlOr from .proof.reachability import APRFailureInfo, APRProof diff --git a/pyk/src/pyk/ktool/claim_index.py b/pyk/src/pyk/ktool/claim_index.py new file mode 100644 index 00000000000..a33294f46b2 --- /dev/null +++ b/pyk/src/pyk/ktool/claim_index.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass +from functools import partial +from graphlib import TopologicalSorter +from typing import TYPE_CHECKING + +from ..kast import Atts +from ..kast.outer import KClaim +from ..utils import FrozenDict, unique + +if TYPE_CHECKING: + from collections.abc import Container, Iterable, Iterator + + from ..kast.outer import KFlatModule, KFlatModuleList + + +@dataclass(frozen=True) +class ClaimIndex(Mapping[str, KClaim]): + claims: FrozenDict[str, KClaim] + main_module_name: str | None + + def __init__( + self, + claims: Mapping[str, KClaim], + main_module_name: str | None = None, + ): + self._validate(claims) + object.__setattr__(self, 'claims', FrozenDict(claims)) + object.__setattr__(self, 'main_module_name', main_module_name) + + @staticmethod + def from_module_list(module_list: KFlatModuleList) -> ClaimIndex: + module_list = ClaimIndex._resolve_depends(module_list) + return ClaimIndex( + claims={claim.label: claim for module in module_list.modules for claim in module.claims}, + main_module_name=module_list.main_module, + ) + + @staticmethod + def _validate(claims: Mapping[str, KClaim]) -> None: + for label, claim in claims.items(): + if claim.label != label: + raise ValueError(f'Claim label mismatch, expected: {label}, found: {claim.label}') + + for depend in claim.dependencies: + if depend not in claims: + raise ValueError(f'Invalid dependency label: {depend}') + + @staticmethod + def _resolve_depends(module_list: KFlatModuleList) -> KFlatModuleList: + """Resolve each depends value relative to the module the claim belongs to. 
+ + Example: + ``` + module THIS-MODULE + claim ... [depends(foo,OTHER-MODULE.bar)] + endmodule + ``` + + becomes + + ``` + module THIS-MODULE + claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] + endmodule + ``` + """ + labels = {claim.label for module in module_list.modules for claim in module.claims} + + def resolve_claim_depends(module_name: str, claim: KClaim) -> KClaim: + depends = claim.dependencies + if not depends: + return claim + + resolve = partial(ClaimIndex._resolve_claim_label, labels, module_name) + resolved = [resolve(label) for label in depends] + return claim.let(att=claim.att.update([Atts.DEPENDS(','.join(resolved))])) + + modules: list[KFlatModule] = [] + for module in module_list.modules: + resolve_depends = partial(resolve_claim_depends, module.name) + module = module.map_sentences(resolve_depends, of_type=KClaim) + modules.append(module) + + return module_list.let(modules=modules) + + @staticmethod + def _resolve_claim_label(labels: Container[str], module_name: str | None, label: str) -> str: + """Resolve `label` to a valid label in `labels`, or raise. + + If a `label` is not found and `module_name` is set, the label is tried after qualifying. + """ + if label in labels: + return label + + if module_name is not None: + qualified = f'{module_name}.{label}' + if qualified in labels: + return qualified + + raise ValueError(f'Claim label not found: {label}') + + def __iter__(self) -> Iterator[str]: + return iter(self.claims) + + def __len__(self) -> int: + return len(self.claims) + + def __getitem__(self, label: str) -> KClaim: + try: + label = self.resolve(label) + except ValueError: + raise KeyError(f'Claim not found: {label}') from None + return self.claims[label] + + def resolve(self, label: str) -> str: + return self._resolve_claim_label(self.claims, self.main_module_name, label) + + def resolve_all(self, labels: Iterable[str]) -> list[str]: + return [self.resolve(label) for label in unique(labels)] + + def labels( + self, + *, + include: Iterable[str] | None = None, + exclude: Iterable[str] | None = None, + with_depends: bool = True, + ordered: bool = False, + ) -> list[str]: + """Return a list of labels from the index. + + Args: + include: Labels to include in the result. If `None`, all labels are included. + exclude: Labels to exclude from the result. If `None`, no labels are excluded. + Takes precedence over `include`. + with_depends: If `True`, the result is transitively closed w.r.t. the dependency relation. + Labels in `exclude` are pruned, and their dependencies are not considered on the given path. + ordered: If `True`, the result is topologically sorted w.r.t. the dependency relation. + + Returns: + A list of labels from the index. + + Raises: + ValueError: If an item in `include` or `exclude` cannot be resolved to a valid label. 
+ """ + include = self.resolve_all(include) if include is not None else self.claims + exclude = self.resolve_all(exclude) if exclude is not None else [] + + labels: list[str] + + if with_depends: + labels = self._close_dependencies(labels=include, prune=exclude) + else: + labels = [label for label in include if label not in set(exclude)] + + if ordered: + return self._sort_topologically(labels) + + return labels + + def _close_dependencies(self, labels: Iterable[str], prune: Iterable[str]) -> list[str]: + res: list[str] = [] + + pending = list(labels) + done = set(prune) + + while pending: + label = pending.pop(0) # BFS + + if label in done: + continue + + res.append(label) + pending += self.claims[label].dependencies + done.add(label) + + return res + + def _sort_topologically(self, labels: list[str]) -> list[str]: + label_set = set(labels) + graph = { + label: [dep for dep in claim.dependencies if dep in label_set] + for label, claim in self.claims.items() + if label in labels + } + return list(TopologicalSorter(graph).static_order()) diff --git a/pyk/src/pyk/ktool/kprove.py b/pyk/src/pyk/ktool/kprove.py index 195bee76536..0c2c945f3e2 100644 --- a/pyk/src/pyk/ktool/kprove.py +++ b/pyk/src/pyk/ktool/kprove.py @@ -4,12 +4,8 @@ import logging import os import re -from collections.abc import Mapping from contextlib import contextmanager -from dataclasses import dataclass from enum import Enum -from functools import cached_property, partial -from graphlib import TopologicalSorter from itertools import chain from pathlib import Path from subprocess import CalledProcessError @@ -17,27 +13,25 @@ from ..cli.utils import check_dir_path, check_file_path from ..cterm import CTerm -from ..kast import Atts, kast_term +from ..kast import kast_term from ..kast.inner import KInner -from ..kast.manip import extract_lhs, flatten_label -from ..kast.outer import KApply, KClaim, KDefinition, KFlatModule, KFlatModuleList, KImport, KRequire +from ..kast.manip import flatten_label +from ..kast.outer import KDefinition, KFlatModule, KFlatModuleList, KImport, KRequire from ..kore.rpc import KoreExecLogFormat from ..prelude.ml import is_top -from ..proof import APRProof, APRProver, EqualityProof, ImpliesProver -from ..utils import FrozenDict, gen_file_timestamp, run_process, unique +from ..utils import gen_file_timestamp, run_process from . 
import TypeInferenceMode +from .claim_index import ClaimIndex from .kprint import KPrint if TYPE_CHECKING: - from collections.abc import Callable, Container, Iterable, Iterator + from collections.abc import Callable, Iterable, Iterator, Mapping from subprocess import CompletedProcess - from typing import ContextManager, Final + from typing import Final - from ..cli.pyk import ProveOptions - from ..kast.outer import KRule, KRuleLike + from ..kast.outer import KClaim, KRule, KRuleLike from ..kast.pretty import SymbolTable from ..kcfg import KCFGExplore - from ..proof import Proof, Prover from ..utils import BugReport _LOGGER: Final = logging.getLogger(__name__) @@ -401,219 +395,3 @@ def _get_rule_line(_line: str) -> tuple[str, bool, int] | None: axioms.pop(-1) return axioms - - -class ProveRpc: - _kprove: KProve - _explore_context: Callable[[], ContextManager[KCFGExplore]] - - def __init__( - self, - kprove: KProve, - explore_context: Callable[[], ContextManager[KCFGExplore]], - ): - self._kprove = kprove - self._explore_context = explore_context - - def prove_rpc(self, options: ProveOptions) -> list[Proof]: - all_claims = self._kprove.get_claims( - options.spec_file, - spec_module_name=options.spec_module, - claim_labels=options.claim_labels, - exclude_claim_labels=options.exclude_claim_labels, - type_inference_mode=options.type_inference_mode, - ) - - if all_claims is None: - raise ValueError(f'No claims found in file: {options.spec_file}') - - return [ - self._prove_claim_rpc( - claim, - max_depth=options.max_depth, - save_directory=options.save_directory, - max_iterations=options.max_iterations, - ) - for claim in all_claims - ] - - def _prove_claim_rpc( - self, - claim: KClaim, - max_depth: int | None = None, - save_directory: Path | None = None, - max_iterations: int | None = None, - ) -> Proof: - definition = self._kprove.definition - - proof: Proof - prover: Prover - lhs_top = extract_lhs(claim.body) - is_functional_claim = type(lhs_top) is KApply and definition.symbols[lhs_top.label.name] in definition.functions - - if is_functional_claim: - proof = EqualityProof.from_claim(claim, definition, proof_dir=save_directory) - if save_directory is not None and EqualityProof.proof_data_exists(proof.id, save_directory): - _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') - proof = EqualityProof.read_proof_data(save_directory, proof.id) - - else: - proof = APRProof.from_claim(definition, claim, {}, proof_dir=save_directory) - if save_directory is not None and APRProof.proof_data_exists(proof.id, save_directory): - _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') - proof = APRProof.read_proof_data(save_directory, proof.id) - - if not proof.passed and (max_iterations is None or max_iterations > 0): - with self._explore_context() as kcfg_explore: - if is_functional_claim: - assert type(proof) is EqualityProof - prover = ImpliesProver(proof, kcfg_explore) - else: - assert type(proof) is APRProof - prover = APRProver(kcfg_explore, execute_depth=max_depth) - prover.advance_proof(proof, max_iterations=max_iterations) - - if proof.passed: - _LOGGER.info(f'Proof passed: {proof.id}') - elif proof.failed: - _LOGGER.info(f'Proof failed: {proof.id}') - else: - _LOGGER.info(f'Proof pending: {proof.id}') - return proof - - -@dataclass(frozen=True) -class ClaimIndex(Mapping[str, KClaim]): - claims: FrozenDict[str, KClaim] - main_module_name: str | None - - def __init__( - self, - claims: Mapping[str, KClaim], - main_module_name: str | None = None, - ): - 
self._validate(claims) - object.__setattr__(self, 'claims', FrozenDict(claims)) - object.__setattr__(self, 'main_module_name', main_module_name) - - @staticmethod - def from_module_list(module_list: KFlatModuleList) -> ClaimIndex: - module_list = ClaimIndex._resolve_depends(module_list) - return ClaimIndex( - claims={claim.label: claim for module in module_list.modules for claim in module.claims}, - main_module_name=module_list.main_module, - ) - - @staticmethod - def _validate(claims: Mapping[str, KClaim]) -> None: - for label, claim in claims.items(): - if claim.label != label: - raise ValueError(f'Claim label mismatch, expected: {label}, found: {claim.label}') - - for depend in claim.dependencies: - if depend not in claims: - raise ValueError(f'Invalid dependency label: {depend}') - - @staticmethod - def _resolve_depends(module_list: KFlatModuleList) -> KFlatModuleList: - """Resolve each depends value relative to the module the claim belongs to. - - Example: - ``` - module THIS-MODULE - claim ... [depends(foo,OTHER-MODULE.bar)] - endmodule - ``` - - becomes - - ``` - module THIS-MODULE - claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] - endmodule - ``` - """ - labels = {claim.label for module in module_list.modules for claim in module.claims} - - def resolve_claim_depends(module_name: str, claim: KClaim) -> KClaim: - depends = claim.dependencies - if not depends: - return claim - - resolve = partial(ClaimIndex._resolve_claim_label, labels, module_name) - resolved = [resolve(label) for label in depends] - return claim.let(att=claim.att.update([Atts.DEPENDS(','.join(resolved))])) - - modules: list[KFlatModule] = [] - for module in module_list.modules: - resolve_depends = partial(resolve_claim_depends, module.name) - module = module.map_sentences(resolve_depends, of_type=KClaim) - modules.append(module) - - return module_list.let(modules=modules) - - @staticmethod - def _resolve_claim_label(labels: Container[str], module_name: str | None, label: str) -> str: - """Resolve `label` to a valid label in `labels`, or raise. - - If a `label` is not found and `module_name` is set, the label is tried after qualifying. 
- """ - if label in labels: - return label - - if module_name is not None: - qualified = f'{module_name}.{label}' - if qualified in labels: - return qualified - - raise ValueError(f'Claim label not found: {label}') - - def __iter__(self) -> Iterator[str]: - return iter(self.claims) - - def __len__(self) -> int: - return len(self.claims) - - def __getitem__(self, label: str) -> KClaim: - try: - label = self.resolve(label) - except ValueError: - raise KeyError(f'Claim not found: {label}') from None - return self.claims[label] - - @cached_property - def topological(self) -> tuple[str, ...]: - graph = {label: claim.dependencies for label, claim in self.claims.items()} - return tuple(TopologicalSorter(graph).static_order()) - - def resolve(self, label: str) -> str: - return self._resolve_claim_label(self.claims, self.main_module_name, label) - - def resolve_all(self, labels: Iterable[str]) -> list[str]: - return [self.resolve(label) for label in unique(labels)] - - def labels( - self, - *, - include: Iterable[str] | None = None, - exclude: Iterable[str] | None = None, - with_depends: bool = True, - ) -> list[str]: - res: list[str] = [] - - pending = self.resolve_all(include) if include is not None else list(self.claims) - done = set(self.resolve_all(exclude)) if exclude is not None else set() - - while pending: - label = pending.pop(0) # BFS - - if label in done: - continue - - res.append(label) - done.add(label) - - if with_depends: - pending += self.claims[label].dependencies - - return res diff --git a/pyk/src/pyk/ktool/prove_rpc.py b/pyk/src/pyk/ktool/prove_rpc.py new file mode 100644 index 00000000000..250381d3f54 --- /dev/null +++ b/pyk/src/pyk/ktool/prove_rpc.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from ..kast.manip import extract_lhs +from ..kast.outer import KApply +from ..proof import APRProof, APRProver, EqualityProof, ImpliesProver + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + from typing import ContextManager, Final + + from ..cli.pyk import ProveOptions + from ..kast.outer import KClaim + from ..kcfg import KCFGExplore + from ..proof import Proof, Prover + from .kprove import KProve + +_LOGGER: Final = logging.getLogger(__name__) + + +class ProveRpc: + _kprove: KProve + _explore_context: Callable[[], ContextManager[KCFGExplore]] + + def __init__( + self, + kprove: KProve, + explore_context: Callable[[], ContextManager[KCFGExplore]], + ): + self._kprove = kprove + self._explore_context = explore_context + + def prove_rpc(self, options: ProveOptions) -> list[Proof]: + all_claims = self._kprove.get_claims( + options.spec_file, + spec_module_name=options.spec_module, + claim_labels=options.claim_labels, + exclude_claim_labels=options.exclude_claim_labels, + type_inference_mode=options.type_inference_mode, + ) + + if all_claims is None: + raise ValueError(f'No claims found in file: {options.spec_file}') + + return [ + self._prove_claim_rpc( + claim, + max_depth=options.max_depth, + save_directory=options.save_directory, + max_iterations=options.max_iterations, + ) + for claim in all_claims + ] + + def _prove_claim_rpc( + self, + claim: KClaim, + max_depth: int | None = None, + save_directory: Path | None = None, + max_iterations: int | None = None, + ) -> Proof: + definition = self._kprove.definition + + proof: Proof + prover: Prover + lhs_top = extract_lhs(claim.body) + is_functional_claim = type(lhs_top) is KApply and definition.symbols[lhs_top.label.name] in 
definition.functions + + if is_functional_claim: + proof = EqualityProof.from_claim(claim, definition, proof_dir=save_directory) + if save_directory is not None and EqualityProof.proof_data_exists(proof.id, save_directory): + _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') + proof = EqualityProof.read_proof_data(save_directory, proof.id) + + else: + proof = APRProof.from_claim(definition, claim, {}, proof_dir=save_directory) + if save_directory is not None and APRProof.proof_data_exists(proof.id, save_directory): + _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') + proof = APRProof.read_proof_data(save_directory, proof.id) + + if not proof.passed and (max_iterations is None or max_iterations > 0): + with self._explore_context() as kcfg_explore: + if is_functional_claim: + assert type(proof) is EqualityProof + prover = ImpliesProver(proof, kcfg_explore) + else: + assert type(proof) is APRProof + prover = APRProver(kcfg_explore, execute_depth=max_depth) + prover.advance_proof(proof, max_iterations=max_iterations) + + if proof.passed: + _LOGGER.info(f'Proof passed: {proof.id}') + elif proof.failed: + _LOGGER.info(f'Proof failed: {proof.id}') + else: + _LOGGER.info(f'Proof pending: {proof.id}') + return proof diff --git a/pyk/src/pyk/proof/reachability.py b/pyk/src/pyk/proof/reachability.py index a079190441e..e0ece7406f9 100644 --- a/pyk/src/pyk/proof/reachability.py +++ b/pyk/src/pyk/proof/reachability.py @@ -1,6 +1,5 @@ from __future__ import annotations -import graphlib import json import logging import re @@ -10,13 +9,13 @@ from pyk.kore.rpc import LogEntry from ..cterm.cterm import remove_useless_constraints -from ..kast.att import AttEntry, Atts from ..kast.inner import KInner, Subst from ..kast.manip import flatten_label, free_vars, ml_pred_to_bool from ..kast.outer import KFlatModule, KImport, KRule from ..kcfg import KCFG, KCFGStore from ..kcfg.exploration import KCFGExploration from ..konvert import kflatmodule_to_kore +from ..ktool.claim_index import ClaimIndex from ..prelude.ml import mlAnd, mlTop from ..utils import FrozenDict, ensure_dir_path, hash_str, shorten_hashes, single from .implies import ProofSummary, Prover, RefutationProof @@ -423,74 +422,28 @@ def from_spec_modules( logs: dict[int, tuple[LogEntry, ...]], proof_dir: Path | None = None, spec_labels: Iterable[str] | None = None, - **kwargs: Any, ) -> list[APRProof]: - claims_by_label = {claim.label: claim for module in spec_modules.modules for claim in module.claims} - if spec_labels is None: - spec_labels = list(claims_by_label.keys()) - _spec_labels = [] - for spec_label in spec_labels: - if spec_label in claims_by_label: - _spec_labels.append(spec_label) - elif f'{spec_modules.main_module}.{spec_label}' in claims_by_label: - _spec_labels.append(f'{spec_modules.main_module}.{spec_label}') + claim_index = ClaimIndex.from_module_list(spec_modules) + spec_labels = claim_index.labels(include=spec_labels, with_depends=True, ordered=True) + + res: list[APRProof] = [] + + for label in spec_labels: + if proof_dir is not None and Proof.proof_data_exists(label, proof_dir): + apr_proof = APRProof.read_proof_data(proof_dir, label) else: - raise ValueError( - f'Could not find specification label: {spec_label} or {spec_modules.main_module}.{spec_label}' + _LOGGER.info(f'Building APRProof for claim: {label}') + claim = claim_index[label] + apr_proof = APRProof.from_claim( + defn, + claim, + logs=logs, + proof_dir=proof_dir, ) - spec_labels = _spec_labels - - claims_graph: dict[str, list[str]] = {} - 
unfound_dependencies = []
-        for module in spec_modules.modules:
-            for claim in module.claims:
-                claims_graph[claim.label] = []
-                for dependency in claim.dependencies:
-                    if dependency in claims_by_label:
-                        claims_graph[claim.label].append(dependency)
-                    elif f'{module.name}.{dependency}' in claims_by_label:
-                        claims_graph[claim.label].append(f'{module.name}.{dependency}')
-                    else:
-                        unfound_dependencies.append((claim.label, module.name, dependency))
-        if unfound_dependencies:
-            unfound_dependency_list = [
-                f'Could not find dependency for claim {label}: {dependency} or {module_name}.{dependency}'
-                for label, module_name, dependency in unfound_dependencies
-            ]
-            unfound_dependency_message = '\n - ' + '\n - '.join(unfound_dependency_list)
-            raise ValueError(f'Could not find dependencies:{unfound_dependency_message}')
-
-        claims_subgraph: dict[str, list[str]] = {}
-        remaining_claims = spec_labels
-        while len(remaining_claims) > 0:
-            claim_label = remaining_claims.pop()
-            claims_subgraph[claim_label] = claims_graph[claim_label]
-            remaining_claims.extend(claims_graph[claim_label])
-
-        topological_sorter = graphlib.TopologicalSorter(claims_subgraph)
-        topological_sorter.prepare()
-        apr_proofs: list[APRProof] = []
-        while topological_sorter.is_active():
-            for claim_label in topological_sorter.get_ready():
-                if proof_dir is not None and Proof.proof_data_exists(claim_label, proof_dir):
-                    apr_proof = APRProof.read_proof_data(proof_dir, claim_label)
-                else:
-                    _LOGGER.info(f'Building APRProof for claim: {claim_label}')
-                    claim = claims_by_label[claim_label]
-                    if len(claims_graph[claim_label]) > 0:
-                        claim_att = claim.att.update([AttEntry(Atts.DEPENDS, ','.join(claims_graph[claim_label]))])
-                        claim = claim.let_att(claim_att)
-                    apr_proof = APRProof.from_claim(
-                        defn,
-                        claim,
-                        logs=logs,
-                        proof_dir=proof_dir,
-                    )
-                apr_proof.write_proof_data()
-                apr_proofs.append(apr_proof)
-                topological_sorter.done(claim_label)
-
-        return apr_proofs
+            apr_proof.write_proof_data()
+            res.append(apr_proof)
+
+        return res

     def path_constraints(self, final_node_id: NodeIdLike) -> KInner:
         path = self.shortest_path_to(final_node_id)
diff --git a/pyk/src/tests/integration/ktool/test_imp.py b/pyk/src/tests/integration/ktool/test_imp.py
index 347c12621c2..66b45faa559 100644
--- a/pyk/src/tests/integration/ktool/test_imp.py
+++ b/pyk/src/tests/integration/ktool/test_imp.py
@@ -10,7 +10,7 @@ from pyk.cli.pyk import ProveOptions
 from pyk.kast.inner import KApply, KSequence, KVariable
 from pyk.kcfg.semantics import KCFGSemantics
-from pyk.ktool.kprove import ProveRpc
+from pyk.ktool.prove_rpc import ProveRpc
 from pyk.proof import ProofStatus
 from pyk.testing import KCFGExploreTest, KProveTest
 from pyk.utils import single

From 1de58c1a658c9ea18d90d20d86ad37db40a215d5 Mon Sep 17 00:00:00 2001
From: Dwight Guth
Date: Tue, 25 Jun 2024 02:42:06 -0500
Subject: [PATCH 08/10] add two important missing flags to the llvm backend compiler (#4472)

This PR fixes a small fraction of a significant piece of technical debt in pyk's implementation of `pyk.ktool.kompile`, adding support for two important command-line flags that previously could not be passed to the compiler due to its design.

This is by no means an exhaustive list. There are still other important flags that are unsupported. This is simply what is likely to block me in the immediate future.
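For a sense of how the two new options are meant to be driven from Python, here is a minimal sketch (illustrative only, not part of this PR: the `KompileArgs`-based construction and the heuristic string are assumptions):

```python
# Sketch: exercising the two new LLVMKompile options added in this patch.
# KompileArgs usage and the 'LNDbu' heuristic value are illustrative assumptions.
from fractions import Fraction

from pyk.ktool.kompile import KompileArgs, LLVMKompile

llvm = LLVMKompile(
    base_args=KompileArgs(main_file='imp.k'),
    iterated_threshold=Fraction(1, 2),  # serialized as: --iterated-threshold 1/2
    heuristic='LNDbu',                  # serialized as: --heuristic LNDbu
)
# Per the diff, both options surface in the generated kompile argument list.
assert '--iterated-threshold' in llvm.args and '1/2' in llvm.args
```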
Co-authored-by: Bruce Collie Co-authored-by: rv-jenkins --- pyk/src/pyk/ktool/kompile.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pyk/src/pyk/ktool/kompile.py b/pyk/src/pyk/ktool/kompile.py index 17c9b6d4bf8..fa70fa496e0 100644 --- a/pyk/src/pyk/ktool/kompile.py +++ b/pyk/src/pyk/ktool/kompile.py @@ -20,6 +20,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Mapping + from fractions import Fraction from typing import Any, Final, Literal from ..utils import BugReport @@ -421,6 +422,8 @@ class LLVMKompile(Kompile): enable_llvm_debug: bool llvm_proof_hint_instrumentation: bool llvm_mutable_bytes: bool + iterated_threshold: Fraction | None + heuristic: str | None def __init__( self, @@ -435,6 +438,8 @@ def __init__( enable_llvm_debug: bool = False, llvm_proof_hint_instrumentation: bool = False, llvm_mutable_bytes: bool = False, + iterated_threshold: Fraction | None = None, + heuristic: str | None = None, ): llvm_kompile_type = LLVMKompileType(llvm_kompile_type) if llvm_kompile_type is not None else None llvm_kompile_output = Path(llvm_kompile_output) if llvm_kompile_output is not None else None @@ -455,6 +460,8 @@ def __init__( object.__setattr__(self, 'enable_llvm_debug', enable_llvm_debug) object.__setattr__(self, 'llvm_proof_hint_instrumentation', llvm_proof_hint_instrumentation) object.__setattr__(self, 'llvm_mutable_bytes', llvm_mutable_bytes) + object.__setattr__(self, 'iterated_threshold', iterated_threshold) + object.__setattr__(self, 'heuristic', heuristic) @property def backend(self) -> Literal[KompileBackend.LLVM]: @@ -491,6 +498,12 @@ def args(self) -> list[str]: if self.llvm_mutable_bytes: args += ['--llvm-mutable-bytes'] + if self.iterated_threshold: + args += ['--iterated-threshold', str(self.iterated_threshold)] + + if self.heuristic: + args += ['--heuristic', self.heuristic] + return args From 531652d017eb47ef00e0320e5718001118ff073f Mon Sep 17 00:00:00 2001 From: Bruce Collie Date: Tue, 25 Jun 2024 11:17:03 +0100 Subject: [PATCH 09/10] Use prebuilt haskell backend (#4473) This PR uses the existing infrastructure we have for the LLVM backend to download a pre-packaged release of the Haskell backend when running the K integration tests. It's a bit obscured currently because of degraded CI performance generally, but this should take average-case CI run times down to 20-25 minutes. 
--- .github/actions/with-k-docker/Dockerfile | 4 +++- .github/actions/with-k-docker/action.yml | 10 +++++++++- package/debian/kframework-frontend/rules.jammy | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/actions/with-k-docker/Dockerfile b/.github/actions/with-k-docker/Dockerfile index 97df8ea4fb5..09f800e3669 100644 --- a/.github/actions/with-k-docker/Dockerfile +++ b/.github/actions/with-k-docker/Dockerfile @@ -5,9 +5,11 @@ FROM ubuntu:${BASE_DISTRO} ARG K_DEB_PATH ARG INSTALL_BACKEND_DEBS ARG LLVM_BACKEND_DEB_PATH +ARG HASKELL_BACKEND_DEB_PATH COPY ${K_DEB_PATH} /kframework.deb COPY ${LLVM_BACKEND_DEB_PATH} /llvm-backend.deb +COPY ${HASKELL_BACKEND_DEB_PATH} /haskell-backend.deb RUN apt-get -y update \ && apt-get -y install \ @@ -19,7 +21,7 @@ RUN apt-get -y update \ /kframework.deb RUN if [ "${INSTALL_BACKEND_DEBS}" = "true" ]; then \ - apt-get -y install /llvm-backend.deb; \ + apt-get -y install /llvm-backend.deb /haskell-backend.deb; \ fi RUN apt-get -y clean diff --git a/.github/actions/with-k-docker/action.yml b/.github/actions/with-k-docker/action.yml index 62c75bed3e1..f5b67cabdc2 100644 --- a/.github/actions/with-k-docker/action.yml +++ b/.github/actions/with-k-docker/action.yml @@ -42,6 +42,7 @@ runs: DOCKERFILE=${{ github.action_path }}/Dockerfile K_DEB_PATH=${{ inputs.k-deb-path }} LLVM_BACKEND_DEB_PATH=llvm-backend.deb + HASKELL_BACKEND_DEB_PATH=haskell-backend.deb gh release download \ --repo runtimeverification/llvm-backend \ @@ -49,13 +50,20 @@ runs: --output "${LLVM_BACKEND_DEB_PATH}" \ v$(cat deps/llvm-backend_release) + gh release download \ + --repo runtimeverification/haskell-backend \ + --pattern "*ubuntu_${BASE_DISTRO}.deb" \ + --output "${HASKELL_BACKEND_DEB_PATH}" \ + $(cat deps/haskell-backend_release) + docker build . \ --file ${DOCKERFILE} \ --tag ${TAG} \ --build-arg BASE_DISTRO=${BASE_DISTRO} \ --build-arg K_DEB_PATH=${K_DEB_PATH} \ --build-arg INSTALL_BACKEND_DEBS=${INSTALL_BACKEND_DEBS} \ - --build-arg LLVM_BACKEND_DEB_PATH=${LLVM_BACKEND_DEB_PATH} + --build-arg LLVM_BACKEND_DEB_PATH=${LLVM_BACKEND_DEB_PATH} \ + --build-arg HASKELL_BACKEND_DEB_PATH=${HASKELL_BACKEND_DEB_PATH} - name: 'Run Docker container' shell: bash {0} diff --git a/package/debian/kframework-frontend/rules.jammy b/package/debian/kframework-frontend/rules.jammy index 5f62a9bc885..def7df0d9e3 100755 --- a/package/debian/kframework-frontend/rules.jammy +++ b/package/debian/kframework-frontend/rules.jammy @@ -23,7 +23,7 @@ export PREFIX dh $@ override_dh_auto_build: - mvn --batch-mode package -DskipTests -Dllvm.backend.skip + mvn --batch-mode package -DskipTests -Dllvm.backend.skip -Dhaskell.backend.skip override_dh_auto_install: package/package --frontend From 94a069d50f711874987b0331eb466df672683e49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrei=20V=C4=83caru?= <16517508+anvacaru@users.noreply.github.com> Date: Tue, 25 Jun 2024 16:19:48 +0300 Subject: [PATCH 10/10] Add missing operators to prelude.kint (#4477) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adding new functions to the `KInt` module for Integer operations described in [domains.md](https://github.com/runtimeverification/k/blob/master/pyk/src/tests/profiling/test-data/domains.md?plain=1#L1173) for: - integer arithmetic - integer min and max - integer absolute value Adding docstrings to KInt module. Adding labels for `minInt`, `maxInt`, and `absInt`. 
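As a quick illustration, the new helpers compose like the existing ones (sketch only; the values are arbitrary):

```python
# Sketch: build the KAST term `absInt(minInt(3, 5)) <=Int maxInt(3, 5)`
# using the helpers added in this patch (absInt, minInt, maxInt) plus leInt.
from pyk.prelude.kint import absInt, intToken, leInt, maxInt, minInt

x, y = intToken(3), intToken(5)
term = leInt(absInt(minInt(x, y)), maxInt(x, y))  # a KApply tree over Int tokens
```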
---------

Co-authored-by: Tamás Tóth
---
 pyk/src/pyk/prelude/kint.py                  | 299 +++++++++++++++++++
 pyk/src/tests/profiling/test-data/domains.md |   6 +-
 2 files changed, 302 insertions(+), 3 deletions(-)

diff --git a/pyk/src/pyk/prelude/kint.py b/pyk/src/pyk/prelude/kint.py
index 28a3eff5ff6..c4ed0dd7808 100644
--- a/pyk/src/pyk/prelude/kint.py
+++ b/pyk/src/pyk/prelude/kint.py
@@ -13,24 +13,323 @@


 def intToken(i: int) -> KToken:  # noqa: N802
+    r"""Instantiate the KAST term ``#token(i, "Int")``.
+
+    Args:
+        i: The integer literal.
+
+    Returns:
+        The KAST term ``#token(i, "Int")``.
+    """
     return KToken(str(i), INT)


 def ltInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_<Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_<Int_`(i1, i2)``.
+    """
     return KApply('_<Int_', i1, i2)


 def leInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_<=Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_<=Int_`(i1, i2)``.
+    """
     return KApply('_<=Int_', i1, i2)


 def gtInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_>Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_>Int_`(i1, i2)``.
+    """
     return KApply('_>Int_', i1, i2)


 def geInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_>=Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_>=Int_`(i1, i2)``.
+    """
     return KApply('_>=Int_', i1, i2)


 def eqInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_==Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_==Int_`(i1, i2)``.
+    """
     return KApply('_==Int_', i1, i2)
+
+
+def neqInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_=/=Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_=/=Int_`(i1, i2)``.
+    """
+    return KApply('_=/=Int_', i1, i2)
+
+
+def notInt(i: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```~Int_`(i)``.
+
+    Args:
+        i: The integer operand.
+
+    Returns:
+        The KAST term ```~Int_`(i)``.
+    """
+    return KApply('~Int_', i)
+
+
+def expInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_^Int_`(i1, i2)``.
+
+    Args:
+        i1: The base.
+        i2: The exponent.
+
+    Returns:
+        The KAST term ```_^Int_`(i1, i2)``.
+    """
+    return KApply('_^Int_', i1, i2)
+
+
+def expModInt(i1: KInner, i2: KInner, i3: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_^%Int__`(i1, i2, i3)``.
+
+    Args:
+        i1: The base.
+        i2: The exponent.
+        i3: The modulus.
+
+    Returns:
+        The KAST term ```_^%Int__`(i1, i2, i3)``.
+    """
+    return KApply('_^%Int__', i1, i2, i3)
+
+
+def mulInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_*Int_`(i1, i2)``.
+
+    Args:
+        i1: The left operand.
+        i2: The right operand.
+
+    Returns:
+        The KAST term ```_*Int_`(i1, i2)``.
+    """
+    return KApply('_*Int_', i1, i2)
+
+
+def divInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_/Int_`(i1, i2)``.
+
+    Args:
+        i1: The dividend.
+        i2: The divisor.
+
+    Returns:
+        The KAST term ```_/Int_`(i1, i2)``.
+    """
+    return KApply('_/Int_', i1, i2)
+
+
+def modInt(i1: KInner, i2: KInner) -> KApply:  # noqa: N802
+    r"""Instantiate the KAST term ```_%Int_`(i1, i2)``.
+
+    Args:
+        i1: The dividend.
+        i2: The divisor.
+
+    Returns:
+        The KAST term ```_%Int_`(i1, i2)``.
+ """ + return KApply('_%Int_', i1, i2) + + +def euclidDivInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_divInt_`(i1, i2)``. + + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_divInt_`(i1, i2)``. + """ + return KApply('_divInt_', i1, i2) + + +def euclidModInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_modInt_`(i1, i2)``. + + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_modInt_`(i1, i2)``. + """ + return KApply('_modInt_', i1, i2) + + +def addInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_+Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_+Int_`(i1, i2)``. + """ + return KApply('_+Int_', i1, i2) + + +def subInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_-Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_-Int_`(i1, i2)``. + """ + return KApply('_-Int_', i1, i2) + + +def rshiftInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_>>Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_>>Int_`(i1, i2)``. + """ + return KApply('_>>Int_', i1, i2) + + +def lshiftInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_< KApply: # noqa: N802 + r"""Instantiate the KAST term ```_&Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_&Int_`(i1, i2)``. + """ + return KApply('_&Int_', i1, i2) + + +def xorInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_xorInt_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_xorInt_`(i1, i2)``. + """ + return KApply('_xorInt_', i1, i2) + + +def orInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_|Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_|Int_`(i1, i2)``. + """ + return KApply('_|Int_', i1, i2) + + +def minInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```minInt`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```minInt`(i1, i2)``. + """ + return KApply('minInt', i1, i2) + + +def maxInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```maxInt`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```maxInt`(i1, i2)``. + """ + return KApply('maxInt', i1, i2) + + +def absInt(i: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```absInt`(i)``. + + Args: + i: The integer operand. + + Returns: + The KAST term ```absInt`(i)``. + """ + return KApply('absInt', i) diff --git a/pyk/src/tests/profiling/test-data/domains.md b/pyk/src/tests/profiling/test-data/domains.md index 1f42f186f28..be0803144fb 100644 --- a/pyk/src/tests/profiling/test-data/domains.md +++ b/pyk/src/tests/profiling/test-data/domains.md @@ -1257,8 +1257,8 @@ You can: You can compute the minimum and maximum `minInt` and `maxInt` of two integers. 
```k - syntax Int ::= "minInt" "(" Int "," Int ")" [function, total, smt-hook((ite (< #1 #2) #1 #2)), hook(INT.min)] - | "maxInt" "(" Int "," Int ")" [function, total, smt-hook((ite (< #1 #2) #2 #1)), hook(INT.max)] + syntax Int ::= "minInt" "(" Int "," Int ")" [function, total, symbol(minInt), smt-hook((ite (< #1 #2) #1 #2)), hook(INT.min)] + | "maxInt" "(" Int "," Int ")" [function, total, symbol(maxInt), smt-hook((ite (< #1 #2) #2 #1)), hook(INT.max)] ``` ### Absolute value @@ -1266,7 +1266,7 @@ You can compute the minimum and maximum `minInt` and `maxInt` of two integers. You can compute the absolute value `absInt` of an integer. ```k - syntax Int ::= absInt ( Int ) [function, total, smt-hook((ite (< #1 0) (- 0 #1) #1)), hook(INT.abs)] + syntax Int ::= absInt ( Int ) [function, total, symbol(absInt), smt-hook((ite (< #1 0) (- 0 #1) #1)), hook(INT.abs)] ``` ### Log base 2
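A closing note on the `symbol(...)` attributes added above: they pin each production to a stable label, so the `KApply` labels used by the new `pyk` helpers line up with the kompiled definition. A minimal sketch (illustrative, not from the patch):

```python
# Sketch: with symbol(minInt) on the production, the helper builds exactly
# the labeled application that the definition exposes under that name.
from pyk.kast.inner import KApply
from pyk.prelude.kint import intToken, minInt

assert minInt(intToken(1), intToken(2)) == KApply('minInt', intToken(1), intToken(2))
```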