Skip to content

Commit

Permalink
Support PyPy 3.5
Browse files Browse the repository at this point in the history
PyPy 3.5 supports the features of Python 3.5 as well as f-strings.

Python 3.6's variable annotations are no longer used, and a few things
that don't appear in 3.5's typing module are written as string annotations instead.

mypy doesn't run under PyPy, so the mypy check is skipped there.

CPython 3.5 could be supported by getting rid of f-strings.
  • Loading branch information
blyxxyz committed Jun 25, 2019
1 parent 64bf3b6 commit d93a088
Show file tree
Hide file tree
Showing 7 changed files with 69 additions and 44 deletions.
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ language: python
python:
- "3.6"
- "3.7"
- "pypy3.5"

install:
- pip install tox-travis
Expand Down
61 changes: 34 additions & 27 deletions nnf/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
'decision', 'true', 'false', 'dsharp', 'dimacs', 'amc')


def all_models(names: t.Collection[Name]) -> t.Iterator[Model]:
def all_models(names: 't.Collection[Name]') -> t.Iterator[Model]:
"""Yield dictionaries with all possible boolean values for the names.
>>> list(all_models(["a", "b"]))
Expand Down Expand Up @@ -138,7 +138,7 @@ def var(node: NNF) -> t.FrozenSet[Name]:

for node in self.walk():
if isinstance(node, And):
seen: t.Set[Name] = set()
seen = set() # type: t.Set[Name]
for child in node.children:
for name in var(child):
if name in seen:
Expand Down Expand Up @@ -368,7 +368,7 @@ def to_model(self) -> Model:
if not isinstance(self, And):
raise TypeError("A sentence can only be converted to a model if "
"it's a conjunction of variables.")
model: Model = {}
model = {} # type: Model
for child in self.children:
if not isinstance(child, Var):
raise TypeError("A sentence can only be converted to a "
Expand Down Expand Up @@ -407,15 +407,15 @@ def filler(name: Name) -> 'Or':

@memoize
def smooth(node: NNF) -> NNF:
new: NNF
if isinstance(node, And):
new = And(smooth(child) for child in node.children)
new = And(smooth(child)
for child in node.children) # type: NNF
elif isinstance(node, Var):
return node
elif isinstance(node, Or):
names = node.vars()
children = {smooth(child) for child in node.children}
smoothed: t.Set[NNF] = set()
smoothed = set() # type: t.Set[NNF]
for child in children:
child_names = child.vars()
if len(child_names) < len(names):
Expand Down Expand Up @@ -454,7 +454,7 @@ def simplify(self, merge_nodes: bool = True) -> 'NNF':
def simple(node: NNF) -> NNF:
if isinstance(node, Var):
return node
new_children: t.Set[NNF] = set()
new_children = set() # type: t.Set[NNF]
if isinstance(node, Or):
for child in map(simple, node.children):
if child == true:
Expand Down Expand Up @@ -500,7 +500,7 @@ def deduplicate(self) -> 'NNF':
In a lot of cases it's better to avoid the duplication in the first
place, for example with a Builder object.
"""
new_nodes: t.Dict[NNF, NNF] = {}
new_nodes = {} # type: t.Dict[NNF, NNF]

def recreate(node: NNF) -> NNF:
if node not in new_nodes:
Expand All @@ -518,7 +518,7 @@ def recreate(node: NNF) -> NNF:

def object_count(self) -> int:
"""Return the number of distinct node objects in the sentence."""
ids: t.Set[int] = set()
ids = set() # type: t.Set[int]

def count(node: NNF) -> None:
ids.add(id(node))
Expand All @@ -540,8 +540,8 @@ def to_DOT(self, color: bool = False) -> str:
# - add own directives
# - set different palette
counter = itertools.count()
names: t.Dict[NNF, t.Tuple[int, str, str]] = {}
arrows: t.List[t.Tuple[int, int]] = []
names = {} # type: t.Dict[NNF, t.Tuple[int, str, str]]
arrows = [] # type: t.List[t.Tuple[int, int]]

def name(node: NNF) -> int:
if node not in names:
Expand Down Expand Up @@ -617,7 +617,7 @@ def extract(node: NNF) -> t.Set[ModelInt]:
for child in node.children
for model in extract(child)}
elif isinstance(node, And):
models: t.Set[ModelInt] = {frozenset()}
models = {frozenset()} # type: t.Set[ModelInt]
for child in node.children:
models = {existing | new
for new in extract(child)
Expand All @@ -627,7 +627,7 @@ def extract(node: NNF) -> t.Set[ModelInt]:
raise TypeError(node)

names = self.vars()
full_models: t.Set[ModelInt] = set()
full_models = set() # type: t.Set[ModelInt]

def complete(
model: ModelInt,
Expand All @@ -651,7 +651,7 @@ def _models_decomposable(self) -> t.Iterator[Model]:
if not self.satisfiable(decomposable=True):
return
names = tuple(self.vars())
model_tree: t.Dict[bool, t.Any] = {}
model_tree = {} # type: t.Dict[bool, t.Any]

def leaves(
tree: t.Dict[bool, t.Any],
Expand Down Expand Up @@ -739,14 +739,18 @@ class Var(NNF):
negated.
"""

name: Name
true: bool

__slots__ = ('name', 'true')

def __init__(self, name: Name, true: bool = True) -> None:
object.__setattr__(self, 'name', name)
object.__setattr__(self, 'true', true)
if t.TYPE_CHECKING:
def __init__(self, name: Name, true: bool = True) -> None:
# For the typechecker
self.name = name
self.true = true
else:
def __init__(self, name: Name, true: bool = True) -> None:
# For immutability
object.__setattr__(self, 'name', name)
object.__setattr__(self, 'true', true)

def __eq__(self, other: t.Any) -> t.Any:
if self.__class__ is other.__class__:
Expand Down Expand Up @@ -781,11 +785,14 @@ def _sorting_key(self) -> t.Tuple[bool, str, bool]:

class Internal(NNF):
"""Base class for internal nodes, i.e. And and Or nodes."""
children: t.FrozenSet[NNF]

def __init__(self, children: t.Iterable[NNF] = ()) -> None:
# needed because of immutability
object.__setattr__(self, 'children', frozenset(children))
if t.TYPE_CHECKING:
def __init__(self, children: t.Iterable[NNF] = ()) -> None:
# For the typechecker
self.children = frozenset(children)
else:
def __init__(self, children: t.Iterable[NNF] = ()) -> None:
# For immutability
object.__setattr__(self, 'children', frozenset(children))

def __eq__(self, other: t.Any) -> t.Any:
if self.__class__ is other.__class__:
Expand Down Expand Up @@ -815,7 +822,7 @@ def leaf(self) -> bool:

def _is_simple(self) -> bool:
"""Whether all children are leaves that don't share variables."""
variables: t.Set[Name] = set()
variables = set() # type: t.Set[Name]
for child in self.children:
if not child.leaf():
return False
Expand Down Expand Up @@ -935,7 +942,7 @@ class Builder:
# TODO: deduplicate vars that are negated using the operator
def __init__(self, seed: t.Iterable[NNF] = ()):
""":param seed: Nodes to store for reuse in advance."""
self.stored: t.Dict[NNF, NNF] = {true: true, false: false}
self.stored = {true: true, false: false} # type: t.Dict[NNF, NNF]
for node in seed:
self.stored[node] = node
self.true = true
Expand Down
2 changes: 1 addition & 1 deletion nnf/amc.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def add(a: T, b: T) -> T:

if isinstance(node, Or):
best = add_neut
candidates: t.List[NNF] = []
candidates = [] # type: t.List[NNF]
for child in node.children:
value = eval(child, add, mul, add_neut, mul_neut, labeling)
if value > best: # type: ignore
Expand Down
24 changes: 13 additions & 11 deletions nnf/dimacs.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,17 +37,19 @@ def dump(
:param mode: Either ``'sat'`` to dump in the general SAT format,
or ``'cnf'`` to dump in the specialized CNF format.
"""
num_vars: int
if num_variables is None:
if var_labels is None:
names: t.FrozenSet[int] = obj.vars() # type: ignore
if t.TYPE_CHECKING:
names = frozenset() # type: t.FrozenSet[int]
else:
names = obj.vars()
for name in names:
if not isinstance(name, int) or name <= 0:
raise TypeError(
f"{name!r} is not an integer > 0. Try supplying a "
"var_labels dictionary."
)
num_vars = max(names, default=0)
num_vars = max(names, default=0) # type: int
else:
num_vars = max(var_labels.values(), default=0)
else:
Expand Down Expand Up @@ -94,7 +96,7 @@ def _dump_sat(
*,
num_variables: int,
var_labels: t.Optional[t.Dict[Name, int]] = None,
comment_header: t.Optional[str] = None,
comment_header: t.Optional[str] = None
) -> None:
if comment_header is not None:
_write_comments(comment_header, fp)
Expand Down Expand Up @@ -128,7 +130,7 @@ def _dump_cnf(
*,
num_variables: int,
var_labels: t.Optional[t.Dict[Name, int]] = None,
comment_header: t.Optional[str] = None,
comment_header: t.Optional[str] = None
) -> None:
if not isinstance(obj, And):
raise TypeError("CNF sentences must be conjunctions")
Expand Down Expand Up @@ -213,7 +215,7 @@ def loads(s: str) -> NNF:


def _load_sat(fp: t.TextIO) -> NNF:
tokens: t.Deque[str] = collections.deque()
tokens = collections.deque() # type: t.Deque[str]
for line in fp:
if line.startswith('c'):
continue
Expand All @@ -231,7 +233,7 @@ def _load_sat(fp: t.TextIO) -> NNF:
return result


def _parse_sat(tokens: t.Deque[str]) -> NNF:
def _parse_sat(tokens: 't.Deque[str]') -> NNF:
cur = tokens.popleft()
if cur == '(':
content = _parse_sat(tokens)
Expand Down Expand Up @@ -269,7 +271,7 @@ def _parse_sat(tokens: t.Deque[str]) -> NNF:


def _load_cnf(fp: t.TextIO) -> NNF:
tokens: t.Deque[str] = collections.deque()
tokens = [] # type: t.List[str]
for line in fp:
if line.startswith('c'):
continue
Expand All @@ -280,9 +282,9 @@ def _load_cnf(fp: t.TextIO) -> NNF:
return _parse_cnf(tokens)


def _parse_cnf(tokens: t.Deque[str]) -> NNF:
clauses: t.Set[Or] = set()
clause: t.Set[Var] = set()
def _parse_cnf(tokens: t.Iterable[str]) -> NNF:
clauses = set() # type: t.Set[Or]
clause = set() # type: t.Set[Var]
for token in tokens:
if token == '0':
if clause:
Expand Down
2 changes: 1 addition & 1 deletion nnf/dsharp.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def load(fp: t.TextIO) -> NNF:
fmt, nodecount, edges, varcount = fp.readline().split()
node_specs = dict(enumerate(line.split() for line in fp))
assert fmt == 'nnf'
nodes: t.Dict[int, NNF] = {}
nodes = {} # type: t.Dict[int, NNF]
for num, spec in node_specs.items():
if spec[0] == 'L':
if spec[1].startswith('-'):
Expand Down
10 changes: 7 additions & 3 deletions test_nnf.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,16 @@

import pytest

from hypothesis import assume, event, given, strategies as st
from hypothesis import (assume, event, given, strategies as st, settings,
HealthCheck)

import nnf

from nnf import Var, And, Or, amc, dimacs, dsharp

settings.register_profile('patient', deadline=500)
settings.load_profile('patient')

a, b, c = Var('a'), Var('b'), Var('c')

fig1a = (~a & b) | (a & ~b)
Expand Down Expand Up @@ -169,6 +173,7 @@ def merge_nodes(request):
return request.param


@settings(suppress_health_check=(HealthCheck.too_slow,))
@given(sentence=DNNF())
def test_DNNF_sat_strategies(sentence: nnf.NNF, merge_nodes):
sat = sentence.satisfiable()
Expand Down Expand Up @@ -365,8 +370,7 @@ def test_to_model(model: dict):
def test_models_smart_equivalence(sentence: nnf.NNF):
dumb = list(sentence.models())
smart = list(sentence._models_deterministic())
assert len(dumb) == len(smart)
assert all(sentence.satisfied_by(model) for model in smart)
assert model_set(dumb) == model_set(smart)


@pytest.mark.parametrize(
Expand Down
13 changes: 12 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[tox]
envlist = py36, py37
envlist = py36, py37, pypy3

[testenv]
deps =
Expand All @@ -13,3 +13,14 @@ commands =
pytest
python -m doctest nnf/__init__.py -o ELLIPSIS
python examples/socialchoice.py

[testenv:pypy3]
deps =
pytest
hypothesis
flake8
commands =
flake8 nnf
pytest
python -m doctest nnf/__init__.py -o ELLIPSIS
python examples/socialchoice.py

0 comments on commit d93a088

Please sign in to comment.