diff --git a/.github/workflows/examples.yml b/.github/workflows/main.yml similarity index 59% rename from .github/workflows/examples.yml rename to .github/workflows/main.yml index 0ccca7412..60be828a1 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/main.yml @@ -1,14 +1,30 @@ -name: Check Examples +name: Tests -on: [pull_request] +on: [push, pull_request] jobs: - run: + Linting: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - name: Set up Python 3.7 + uses: actions/setup-python@v1 + with: + python-version: 3.7 + - name: Linting + run: | + pip install pre-commit + pre-commit run --all-files + + Linux: + needs: Linting runs-on: ubuntu-latest strategy: max-parallel: 4 matrix: python-version: [3.6, 3.7, 3.8] + steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} @@ -18,12 +34,14 @@ jobs: - name: Install dependencies run: | pip install -r requirements-dev.txt - pip install papermill - - name: Execute notebooks with papermill + - name: Test with pytest run: | + py.test test --cov=pysindy + - name: Execute feature notebook with papermill + run: | + pip install papermill cd example papermill --report-mode feature_overview.ipynb out.json - # for nb in *.ipynb; do papermill --report-mode $nb out.json; done - uses: actions/cache@v1 with: path: ~/.cache/pip diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml deleted file mode 100644 index 12e000415..000000000 --- a/.github/workflows/pythonpackage.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: Build CI - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: [3.6, 3.7, 3.8] - - steps: - - uses: actions/checkout@v1 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install -r requirements-dev.txt - - name: Test with pytest - run: | - 
py.test test --cov=sindy --flake8 - - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements-dev.txt') }} - restore-keys: | - ${{ runner.os }}-pip- diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e85a387ff..33364e283 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,3 +14,8 @@ repos: rev: stable hooks: - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: master + hooks: + - id: flake8 + args: ["--config=setup.cfg"] diff --git a/README.rst b/README.rst index 498552609..ca408241b 100644 --- a/README.rst +++ b/README.rst @@ -42,7 +42,11 @@ Community guidelines Contributing code ^^^^^^^^^^^^^^^^^ -We welcome contributions to PySINDy. To contribute a new feature please submit a pull request. To be accepted your code should conform to PEP8 (you may choose to use flake8 to test this before submitting your pull request). Your contributed code should pass all unit tests. Upon submission of a pull request, your code will be tested automatically, but you may also choose to test it yourself by running +We welcome contributions to PySINDy. To contribute a new feature please submit a pull request. To be accepted your code should conform to PEP8 (you may choose to use flake8 to test this before submitting your pull request). Your contributed code should pass all unit tests. 
Upon submission of a pull request, your code will be linted and tested automatically, but you may also choose to lint it yourself by invoking + +``pre-commit run -a -v`` + +as well as test it yourself by running ``pytest`` diff --git a/docs/conf.py b/docs/conf.py index 961448dc2..78ffe2861 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,7 +7,7 @@ project = "pysindy" # package name -#### no need to edit below this line ## +# no need to edit below this line copyright = f"{datetime.datetime.now().year}, {author}" diff --git a/pysindy/__init__.py b/pysindy/__init__.py index fc73c1218..27ac09f1b 100644 --- a/pysindy/__init__.py +++ b/pysindy/__init__.py @@ -1,4 +1,5 @@ -from pkg_resources import get_distribution, DistributionNotFound +from pkg_resources import DistributionNotFound +from pkg_resources import get_distribution try: __version__ = get_distribution(__name__).version diff --git a/pysindy/differentiation/base.py b/pysindy/differentiation/base.py index 8aba28e9b..5efe5cab7 100644 --- a/pysindy/differentiation/base.py +++ b/pysindy/differentiation/base.py @@ -1,7 +1,6 @@ """ Base class for numerical differentiation methods """ - import abc from pysindy.utils.base import validate_input diff --git a/pysindy/feature_library/__init__.py b/pysindy/feature_library/__init__.py index b8c04a022..47c63d93c 100644 --- a/pysindy/feature_library/__init__.py +++ b/pysindy/feature_library/__init__.py @@ -1,4 +1,4 @@ +from .custom_library import CustomLibrary from .feature_library import BaseFeatureLibrary -from .polynomial_library import PolynomialLibrary from .fourier_library import FourierLibrary -from .custom_library import CustomLibrary +from .polynomial_library import PolynomialLibrary diff --git a/pysindy/feature_library/custom_library.py b/pysindy/feature_library/custom_library.py index 083e1be62..b87d33815 100644 --- a/pysindy/feature_library/custom_library.py +++ b/pysindy/feature_library/custom_library.py @@ -1,11 +1,11 @@ -from pysindy.feature_library import 
BaseFeatureLibrary +from itertools import combinations +from itertools import combinations_with_replacement as combinations_w_r +import numpy as np from sklearn.utils import check_array from sklearn.utils.validation import check_is_fitted -import numpy as np -from itertools import combinations -from itertools import combinations_with_replacement as combinations_w_r +from pysindy.feature_library import BaseFeatureLibrary class CustomLibrary(BaseFeatureLibrary): @@ -23,9 +23,9 @@ class CustomLibrary(BaseFeatureLibrary): a variable name), and output a string depiction of the respective mathematical function applied to that variable. For example, if the first library function is sine, the name function might return - :math:`\sin(x)` given :math:`x` as input. The function_names list must be the + :math:`\\sin(x)` given :math:`x` as input. The function_names list must be the same length as library_functions. If no list of function names is - provided, defaults to using :math:`[ f_0(x),f_1(x), f_2(x), \ldots ]`. + provided, defaults to using :math:`[ f_0(x),f_1(x), f_2(x), \\ldots ]`. 
Attributes ---------- diff --git a/pysindy/feature_library/fourier_library.py b/pysindy/feature_library/fourier_library.py index 50cb8e809..ed5d62923 100644 --- a/pysindy/feature_library/fourier_library.py +++ b/pysindy/feature_library/fourier_library.py @@ -1,9 +1,8 @@ -from pysindy.feature_library import BaseFeatureLibrary - +import numpy as np from sklearn.utils import check_array from sklearn.utils.validation import check_is_fitted -import numpy as np +from pysindy.feature_library import BaseFeatureLibrary class FourierLibrary(BaseFeatureLibrary): @@ -38,9 +37,7 @@ class FourierLibrary(BaseFeatureLibrary): def __init__(self, n_frequencies=1, include_sin=True, include_cos=True): super(FourierLibrary, self).__init__() if not (include_sin or include_cos): - raise ValueError( - "include_sin and include_cos cannot both be False" - ) + raise ValueError("include_sin and include_cos cannot both be False") if n_frequencies < 1 or not isinstance(n_frequencies, int): raise ValueError("n_frequencies must be a positive integer") self.n_frequencies = n_frequencies @@ -68,13 +65,9 @@ def get_feature_names(self, input_features=None): for i in range(self.n_frequencies): for feature in input_features: if self.include_sin: - feature_names.append( - "sin(" + str(i + 1) + " " + feature + ")" - ) + feature_names.append("sin(" + str(i + 1) + " " + feature + ")") if self.include_cos: - feature_names.append( - "cos(" + str(i + 1) + " " + feature + ")" - ) + feature_names.append("cos(" + str(i + 1) + " " + feature + ")") return feature_names def fit(self, X, y=None): diff --git a/pysindy/feature_library/polynomial_library.py b/pysindy/feature_library/polynomial_library.py index 61444a584..e99a6af17 100644 --- a/pysindy/feature_library/polynomial_library.py +++ b/pysindy/feature_library/polynomial_library.py @@ -1,11 +1,14 @@ +from itertools import chain +from itertools import combinations +from itertools import combinations_with_replacement as combinations_w_r + import numpy as np from 
scipy import sparse -from itertools import chain, combinations -from itertools import combinations_with_replacement as combinations_w_r +from sklearn.preprocessing import _csr_polynomial_expansion from sklearn.preprocessing import PolynomialFeatures from sklearn.utils import check_array -from sklearn.utils.validation import check_is_fitted, FLOAT_DTYPES -from sklearn.preprocessing import _csr_polynomial_expansion +from sklearn.utils.validation import check_is_fitted +from sklearn.utils.validation import FLOAT_DTYPES from pysindy.feature_library import BaseFeatureLibrary diff --git a/pysindy/optimizers/__init__.py b/pysindy/optimizers/__init__.py index 1c723f0c0..cbdda9608 100644 --- a/pysindy/optimizers/__init__.py +++ b/pysindy/optimizers/__init__.py @@ -1,5 +1,5 @@ from .base import BaseOptimizer -from .stlsq import STLSQ -from .sr3 import SR3 -from .lasso import LASSO from .elastic_net import ElasticNet +from .lasso import LASSO +from .sr3 import SR3 +from .stlsq import STLSQ diff --git a/pysindy/optimizers/base.py b/pysindy/optimizers/base.py index 2d7dad20e..222664e3f 100644 --- a/pysindy/optimizers/base.py +++ b/pysindy/optimizers/base.py @@ -1,14 +1,13 @@ """ Base class for SINDy optimizers. 
""" - import abc import numpy as np from scipy import sparse from sklearn.linear_model import LinearRegression -from sklearn.utils.validation import check_X_y from sklearn.utils.extmath import safe_sparse_dot +from sklearn.utils.validation import check_X_y def _rescale_data(X, y, sample_weight): @@ -16,13 +15,9 @@ def _rescale_data(X, y, sample_weight): n_samples = X.shape[0] sample_weight = np.asarray(sample_weight) if sample_weight.ndim == 0: - sample_weight = np.full( - n_samples, sample_weight, dtype=sample_weight.dtype - ) + sample_weight = np.full(n_samples, sample_weight, dtype=sample_weight.dtype) sample_weight = np.sqrt(sample_weight) - sw_matrix = sparse.dia_matrix( - (sample_weight, 0), shape=(n_samples, n_samples) - ) + sw_matrix = sparse.dia_matrix((sample_weight, 0), shape=(n_samples, n_samples)) X = safe_sparse_dot(sw_matrix, X) y = safe_sparse_dot(sw_matrix, y) return X, y @@ -95,9 +90,7 @@ def fit(self, x_, y, sample_weight=None, **reduce_kws): ------- self : returns an instance of self """ - x_, y = check_X_y( - x_, y, accept_sparse=[], y_numeric=True, multi_output=False - ) + x_, y = check_X_y(x_, y, accept_sparse=[], y_numeric=True, multi_output=False) x, y, X_offset, y_offset, X_scale = self._preprocess_data( x_, diff --git a/pysindy/optimizers/elastic_net.py b/pysindy/optimizers/elastic_net.py index 478e539bd..6a2a0bd97 100644 --- a/pysindy/optimizers/elastic_net.py +++ b/pysindy/optimizers/elastic_net.py @@ -40,12 +40,7 @@ class ElasticNet(BaseOptimizer): """ def __init__( - self, - alpha=1.0, - l1_ratio=0.5, - max_iter=1000, - elastic_net_kw={}, - **kwargs + self, alpha=1.0, l1_ratio=0.5, max_iter=1000, elastic_net_kw={}, **kwargs ): super(ElasticNet, self).__init__(**kwargs) diff --git a/pysindy/optimizers/sr3.py b/pysindy/optimizers/sr3.py index 8b8fa769f..e704f64b1 100644 --- a/pysindy/optimizers/sr3.py +++ b/pysindy/optimizers/sr3.py @@ -1,7 +1,8 @@ import warnings import numpy as np -from scipy.linalg import cho_factor, cho_solve +from 
scipy.linalg import cho_factor +from scipy.linalg import cho_solve from sklearn.exceptions import ConvergenceWarning from pysindy.optimizers import BaseOptimizer @@ -54,13 +55,7 @@ class SR3(BaseOptimizer): """ def __init__( - self, - threshold=0.1, - nu=1.0, - tol=1e-5, - thresholder="l0", - max_iter=30, - **kwargs + self, threshold=0.1, nu=1.0, tol=1e-5, thresholder="l0", max_iter=30, **kwargs ): super(SR3, self).__init__(**kwargs) @@ -115,9 +110,7 @@ def _reduce(self, x, y): # Precompute some objects for upcoming least-squares solves. # Assumes that self.nu is fixed throughout optimization procedure. - cho = cho_factor( - np.dot(x.T, x) + np.diag(np.full(x.shape[1], 1.0 / self.nu)) - ) + cho = cho_factor(np.dot(x.T, x) + np.diag(np.full(x.shape[1], 1.0 / self.nu))) x_transpose_y = np.dot(x.T, y) for _ in range(self.max_iter): diff --git a/pysindy/optimizers/stlsq.py b/pysindy/optimizers/stlsq.py index c69b2416a..3f6326c46 100644 --- a/pysindy/optimizers/stlsq.py +++ b/pysindy/optimizers/stlsq.py @@ -1,8 +1,8 @@ import warnings import numpy as np -from sklearn.linear_model import ridge_regression from sklearn.exceptions import ConvergenceWarning +from sklearn.linear_model import ridge_regression from pysindy.optimizers import BaseOptimizer @@ -42,9 +42,7 @@ class STLSQ(BaseOptimizer): weight vector have not been masked out. 
""" - def __init__( - self, threshold=0.1, alpha=0.0, max_iter=20, ridge_kw=None, **kwargs - ): + def __init__(self, threshold=0.1, alpha=0.0, max_iter=20, ridge_kw=None, **kwargs): super(STLSQ, self).__init__(**kwargs) if threshold < 0: @@ -108,9 +106,7 @@ def _reduce(self, x, y): break coef = self._regress(x[:, ind], y) - coef, ind = self._sparse_coefficients( - n_features, ind, coef, self.threshold - ) + coef, ind = self._sparse_coefficients(n_features, ind, coef, self.threshold) if sum(ind) == n_features_selected or self._no_change(): # could not (further) select important features diff --git a/pysindy/pysindy.py b/pysindy/pysindy.py index cbde4b6d5..e65c969ef 100644 --- a/pysindy/pysindy.py +++ b/pysindy/pysindy.py @@ -1,16 +1,22 @@ +from numpy import isscalar +from numpy import ndim +from numpy import newaxis +from numpy import vstack +from numpy import zeros +from scipy.integrate import odeint from sklearn.base import BaseEstimator +from sklearn.exceptions import NotFittedError from sklearn.metrics import r2_score from sklearn.multioutput import MultiOutputRegressor from sklearn.pipeline import Pipeline from sklearn.preprocessing import PolynomialFeatures from sklearn.utils.validation import check_is_fitted -from sklearn.exceptions import NotFittedError -from scipy.integrate import odeint -from numpy import vstack, newaxis, zeros, isscalar, ndim from pysindy.differentiation import FiniteDifference from pysindy.optimizers import STLSQ -from pysindy.utils.base import equation, validate_input, drop_nan_rows +from pysindy.utils.base import drop_nan_rows +from pysindy.utils.base import equation +from pysindy.utils.base import validate_input class SINDy(BaseEstimator): @@ -134,9 +140,7 @@ def fit(self, x, t=1, x_dot=None, multiple_trajectories=False): self.model.fit(x, x_dot) - self.n_input_features_ = ( - self.model.estimators_[0].steps[0][1].n_input_features_ - ) + self.n_input_features_ = self.model.estimators_[0].steps[0][1].n_input_features_ 
self.n_output_features_ = ( self.model.estimators_[0].steps[0][1].n_output_features_ ) @@ -178,9 +182,7 @@ def predict(self, x, multiple_trajectories=False): if hasattr(self, "model"): return self.model.predict(x) else: - raise NotFittedError( - "SINDy model must be fit before predict can be called" - ) + raise NotFittedError("SINDy model must be fit before predict can be called") def equations(self, precision=3): """ @@ -209,9 +211,7 @@ def equations(self, precision=3): .get_feature_names(input_features=base_feature_names) ) return [ - equation( - est, input_features=feature_names, precision=precision - ) + equation(est, input_features=feature_names, precision=precision) for est in self.model.estimators_ ] else: @@ -377,14 +377,10 @@ def differentiate(self, x, t=1, multiple_trajectories=False): method """ if self.discrete_time: - raise RuntimeError( - "No differentiation implemented for discrete time model" - ) + raise RuntimeError("No differentiation implemented for discrete time model") if multiple_trajectories: - return self.process_multiple_trajectories( - x, t, None, return_array=False - )[1] + return self.process_multiple_trajectories(x, t, None, return_array=False)[1] else: x = validate_input(x, t) return self.differentiation_method(x, t) @@ -414,9 +410,7 @@ def get_feature_names(self): "SINDy model must be fit before get_feature_names is called" ) - def simulate( - self, x0, t, integrator=odeint, stop_condition=None, **integrator_kws - ): + def simulate(self, x0, t, integrator=odeint, stop_condition=None, **integrator_kws): """ Simulate the SINDy model forward in time. 
@@ -456,7 +450,7 @@ def simulate( x = zeros((t, self.n_input_features_)) x[0] = x0 for i in range(1, t): - x[i] = self.predict(x[i - 1: i]) + x[i] = self.predict(x[i - 1 : i]) if stop_condition is not None and stop_condition(x[i]): return x[: i + 1] return x @@ -474,6 +468,4 @@ def rhs(x, t): @property def complexity(self): - return sum( - est.steps[1][1].complexity for est in self.model.estimators_ - ) + return sum(est.steps[1][1].complexity for est in self.model.estimators_) diff --git a/pysindy/utils/base.py b/pysindy/utils/base.py index 6116ce046..09ef30d67 100644 --- a/pysindy/utils/base.py +++ b/pysindy/utils/base.py @@ -1,5 +1,5 @@ -from itertools import repeat from functools import wraps +from itertools import repeat import numpy as np from sklearn.utils.validation import check_array @@ -80,9 +80,9 @@ def prox_cad(x, lower_threshold): .. code :: prox_cad(z, a, b) = - 0 if \|z\| < a - sign(z)(\|z\| - a) if a < \|z\| <= b - z if \|z\| > b + 0 if |z| < a + sign(z)(|z| - a) if a < |z| <= b + z if |z| > b Entries of :math:`x` smaller than a in magnitude are set to 0, entries with magnitudes larger than b are untouched, diff --git a/requirements-dev.txt b/requirements-dev.txt index 660a168ee..20cbd0163 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,7 +4,6 @@ matplotlib pytest pytest-cov pytest-lazy-fixture -pytest-flake8 flake8-builtins-unleashed codecov setuptools_scm @@ -14,3 +13,4 @@ notebook sphinx >= 2 sphinxcontrib-apidoc sphinx_rtd_theme +pre-commit diff --git a/setup.cfg b/setup.cfg index aad78fcc6..111fcd823 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,18 @@ [flake8] -exclude = *__init__. 
- -[tool:pytest] -flake8-max-line-length = 79 +exclude = + .git, + .venv, + dist, + build, + __pycache__ +ignore = + W503 # Line break before binary operator - Conflicts black + E203 # Whitespace before ':' - Conflicts black +per-file-ignores = + __init__.py:F401,F403 +max-line-length = 88 +import-order-style = smarkets +statistics = True +count = True +verbose = 1 +# format = [%(code)s] %(text)s @ %(path)s:%(row)d:%(col)d diff --git a/setup.py b/setup.py index 9f0e5c480..6e9fe7f3f 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,8 @@ -import sys import pathlib -from setuptools import find_packages, setup +import sys + +from setuptools import find_packages +from setuptools import setup assert sys.version_info >= (3, 6, 0), "PySindy requires Python 3.6+" diff --git a/test/conftest.py b/test/conftest.py index 487606bfb..7e3b78ad5 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,9 +1,8 @@ """ Shared pytest fixtures for unit tests. """ - -import pytest import numpy as np +import pytest from scipy.integrate import odeint from pysindy.feature_library import CustomLibrary diff --git a/test/differentiation/test_differentiation_methods.py b/test/differentiation/test_differentiation_methods.py index 2fcfa4e46..25588463f 100644 --- a/test/differentiation/test_differentiation_methods.py +++ b/test/differentiation/test_differentiation_methods.py @@ -1,9 +1,8 @@ """ Unit tests for differentiation methods. """ - -import pytest import numpy as np +import pytest from pysindy.differentiation import FiniteDifference diff --git a/test/feature_library/test_feature_library.py b/test/feature_library/test_feature_library.py index ce525cc2e..1b39f4289 100644 --- a/test/feature_library/test_feature_library.py +++ b/test/feature_library/test_feature_library.py @@ -1,14 +1,11 @@ """ Unit tests for feature libraries. 
""" - import pytest -from pysindy.feature_library import ( - PolynomialLibrary, - FourierLibrary, - CustomLibrary, -) +from pysindy.feature_library import CustomLibrary +from pysindy.feature_library import FourierLibrary +from pysindy.feature_library import PolynomialLibrary def test_form_custom_library(): @@ -20,9 +17,7 @@ def test_form_custom_library(): ] # Test with user-supplied function names - CustomLibrary( - library_functions=library_functions, function_names=function_names - ) + CustomLibrary(library_functions=library_functions, function_names=function_names) # Test without user-supplied function names CustomLibrary(library_functions=library_functions) diff --git a/test/optimizers/test_optimizers.py b/test/optimizers/test_optimizers.py index 8f739ae5d..35344d9b8 100644 --- a/test/optimizers/test_optimizers.py +++ b/test/optimizers/test_optimizers.py @@ -1,10 +1,12 @@ """ Unit tests for optimizers. """ - import pytest -from pysindy.optimizers import STLSQ, SR3, LASSO, ElasticNet +from pysindy.optimizers import ElasticNet +from pysindy.optimizers import LASSO +from pysindy.optimizers import SR3 +from pysindy.optimizers import STLSQ @pytest.mark.parametrize("optimizer", [STLSQ(), SR3(), LASSO(), ElasticNet()]) diff --git a/test/test_pysindy.py b/test/test_pysindy.py index dcbaabe3c..91625cfbd 100644 --- a/test/test_pysindy.py +++ b/test/test_pysindy.py @@ -12,15 +12,17 @@ pytest file_to_test.py """ - import pytest - from sklearn.exceptions import NotFittedError from pysindy import SINDy from pysindy.differentiation import FiniteDifference -from pysindy.optimizers import STLSQ, SR3, LASSO, ElasticNet -from pysindy.feature_library import PolynomialLibrary, FourierLibrary +from pysindy.feature_library import FourierLibrary +from pysindy.feature_library import PolynomialLibrary +from pysindy.optimizers import ElasticNet +from pysindy.optimizers import LASSO +from pysindy.optimizers import SR3 +from pysindy.optimizers import STLSQ def 
test_get_feature_names_len(data_lorenz): @@ -88,8 +90,7 @@ def test_mixed_inputs(data): @pytest.mark.parametrize( - "data", - [pytest.lazy_fixture("data_1d"), pytest.lazy_fixture("data_lorenz")], + "data", [pytest.lazy_fixture("data_1d"), pytest.lazy_fixture("data_lorenz")], ) def test_bad_t(data): x, t = data