diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index e72415c424..7d24408638 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -12,7 +12,7 @@ jobs: fail-fast: false matrix: os: [ ubuntu-latest, macos-latest, windows-latest ] - python-version: [ 3.8, 3.9, "3.10", "3.11" ] + python-version: [ 3.8, 3.9, "3.10", "3.11", "3.12" ] exclude: # avoid shutil.copytree infinite recursion bug # https://github.com/python/cpython/pull/17098 @@ -27,7 +27,6 @@ jobs: uses: actions/checkout@v4 - name: Setup Python - if: runner.os != 'Windows' uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -35,48 +34,16 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Python dependencies - if: runner.os != 'Windows' run: | pip install --upgrade pip pip install . pip install ".[test, optional]" - - name: Setup Micromamba - if: runner.os == 'Windows' - uses: mamba-org/setup-micromamba@v1 - with: - environment-file: etc/environment.yml - cache-environment: true - cache-downloads: true - create-args: >- - python=${{ matrix.python-version }} - init-shell: >- - bash - powershell - - - name: Install extra Python dependencies - if: runner.os == 'Windows' - shell: bash -l {0} - run: | - pip install xmipy - pip install . - - name: Install Modflow executables uses: modflowpy/install-modflow-action@v1 - name: Run benchmarks - if: runner.os != 'Windows' - working-directory: ./autotest - run: | - mkdir -p .benchmarks - pytest -v --durations=0 --benchmark-only --benchmark-json .benchmarks/${{ matrix.os }}_python${{ matrix.python-version }}.json --keep-failed=.failed - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Run benchmarks - if: runner.os == 'Windows' - shell: bash -l {0} - working-directory: ./autotest + working-directory: autotest run: | mkdir -p .benchmarks pytest -v --durations=0 --benchmark-only --benchmark-json .benchmarks/${{ matrix.os }}_python${{ matrix.python-version }}.json --keep-failed=.failed @@ -115,7 +82,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.12 cache: 'pip' cache-dependency-path: pyproject.toml @@ -151,7 +118,6 @@ jobs: fi python ./scripts/process_benchmarks.py ./autotest/.benchmarks ./autotest/.benchmarks env: - ARTIFACTS: ${{steps.run_tests.outputs.artifact_ids}} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Upload benchmark results diff --git a/.github/workflows/commit.yml b/.github/workflows/commit.yml index ba88366f2b..d56c1194de 100644 --- a/.github/workflows/commit.yml +++ b/.github/workflows/commit.yml @@ -32,11 +32,10 @@ jobs: python -c "import flopy; print(f'{flopy.__version__}')" - name: Build package - run: | - python -m build + run: python -m build + - name: Check package - run: | - twine check --strict dist/* + run: twine check --strict dist/* lint: name: Lint @@ -135,13 +134,13 @@ jobs: if: failure() with: name: failed-smoke-${{ runner.os }}-${{ env.PYTHON_VERSION }} - path: ./autotest/.failed/** + path: autotest/.failed/** - name: Upload coverage if: github.repository_owner == 'modflowpy' && (github.event_name == 'push' || github.event_name == 'pull_request') uses: codecov/codecov-action@v3 with: - files: ./autotest/coverage.xml + files: autotest/coverage.xml test: name: Test @@ -158,7 +157,7 @@ jobs: - python-version: '3.8.0' defaults: run: - shell: bash + shell: bash -l {0} timeout-minutes: 45 steps: @@ -166,32 +165,17 @@ jobs: uses: actions/checkout@v4 - name: Setup Python - if: runner.os != 
'Windows' uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} cache: 'pip' cache-dependency-path: pyproject.toml - + - name: Install Python dependencies - if: runner.os != 'Windows' run: | pip install --upgrade pip pip install . - pip install ".[test, optional]" - - - name: Setup Micromamba - if: runner.os == 'Windows' - uses: mamba-org/setup-micromamba@v1 - with: - environment-file: etc/environment.yml - cache-environment: true - cache-downloads: true - create-args: >- - python=${{ matrix.python-version }} - init-shell: >- - bash - powershell + pip install ".[test,optional]" - name: Install Modflow-related executables uses: modflowpy/install-modflow-action@v1 @@ -201,30 +185,17 @@ jobs: with: repo: modflow6-nightly-build - - name: Update FloPy packages - if: runner.os != 'Windows' + - name: Update package classes run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - - name: Update FloPy packages - if: runner.os == 'Windows' - shell: bash -l {0} - run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - - - name: Run tests - if: runner.os != 'Windows' - working-directory: ./autotest - run: | - pytest -v -m="not example and not regression" -n=auto --cov=flopy --cov-append --cov-report=xml --durations=0 --keep-failed=.failed --dist loadfile - coverage report - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Run tests - if: runner.os == 'Windows' - shell: bash -l {0} - working-directory: ./autotest + working-directory: autotest run: | - pytest -v -m="not example and not regression" -n=auto --cov=flopy --cov-append --cov-report=xml --durations=0 --keep-failed=.failed --dist loadfile + m="not example and not regression" + if [[ "${{ matrix.python-version}}" == "3.12" ]]; then + m="$m and not generation" + fi + pytest -v -m="$m" -n=auto --cov=flopy --cov-append --cov-report=xml --durations=0 --keep-failed=.failed --dist loadfile coverage report env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -234,11 +205,10 @@ jobs: if: failure() with: name: failed-${{ matrix.os }}-${{ matrix.python-version }} - path: | - ./autotest/.failed/** + path: autotest/.failed/** - name: Upload coverage if: github.repository_owner == 'modflowpy' && (github.event_name == 'push' || github.event_name == 'pull_request') uses: codecov/codecov-action@v3 with: - files: ./autotest/coverage.xml + files: autotest/coverage.xml diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index fe9b014b3b..78d6591c73 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -12,7 +12,7 @@ jobs: fail-fast: false matrix: os: [ ubuntu-latest, macos-latest, windows-latest ] - python-version: [ 3.8, 3.9, "3.10", "3.11" ] + python-version: [ 3.8, 3.9, "3.10", "3.11", "3.12" ] exclude: # avoid shutil.copytree infinite recursion bug # https://github.com/python/cpython/pull/17098 @@ -25,23 +25,7 @@ jobs: - name: Checkout repo uses: actions/checkout@v4 - - name: Setup Python - if: runner.os != 'Windows' - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: pyproject.toml - - - name: Install Python dependencies - if: runner.os != 'Windows' - run: | - pip install --upgrade pip - pip install . 
- pip install ".[test, optional]" - - name: Setup Micromamba - if: runner.os == 'Windows' uses: mamba-org/setup-micromamba@v1 with: environment-file: etc/environment.yml @@ -53,14 +37,10 @@ jobs: bash powershell - - name: Install extra Python dependencies - if: runner.os == 'Windows' - shell: bash -l {0} - run: | - pip install xmipy - pip install . + - name: Install FloPy + run: pip install . - - name: Workaround OpenGL issue on Linux + - name: OpenGL workaround on Linux if: runner.os == 'Linux' run: | # referenced from https://github.com/pyvista/pyvista/blob/main/.github/workflows/vtk-pre-test.yml#L53 @@ -85,28 +65,11 @@ jobs: repo: modflow6-nightly-build - name: Update FloPy packages - if: runner.os != 'Windows' - run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - - - name: Update FloPy packages - if: runner.os == 'Windows' - shell: bash -l {0} run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - name: Run example tests - if: runner.os != 'Windows' - working-directory: ./autotest - run: | - pytest -v -m="example" -n=auto -s --durations=0 --keep-failed=.failed - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Run example tests - if: runner.os == 'Windows' - shell: bash -l {0} - working-directory: ./autotest - run: | - pytest -v -m="example" -n=auto -s --durations=0 --keep-failed=.failed + working-directory: autotest + run: pytest -v -m="example" -n=auto -s --durations=0 --keep-failed=.failed env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -115,5 +78,4 @@ jobs: if: failure() with: name: failed-example-${{ matrix.os }}-${{ matrix.python-version }} - path: | - ./autotest/.failed/** + path: autotest/.failed/** diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 972f1e1650..a18dec2068 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -12,34 +12,19 @@ jobs: fail-fast: false matrix: os: [ ubuntu-latest, macos-latest, windows-latest ] - python-version: [ 3.8, 3.9, "3.10", "3.11" ] + python-version: [ 3.8, 3.9, "3.10", "3.11", "3.12" ] exclude: # avoid shutil.copytree infinite recursion bug # https://github.com/python/cpython/pull/17098 - python-version: '3.8.0' defaults: run: - shell: bash + shell: bash -l {0} timeout-minutes: 90 steps: - name: Checkout repo uses: actions/checkout@v4 - - name: Setup Python - if: runner.os != 'Windows' - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: pyproject.toml - - - name: Install Python dependencies - if: runner.os != 'Windows' - run: | - pip install --upgrade pip - pip install . - pip install ".[test, optional]" - - name: Setup Micromamba if: runner.os == 'Windows' uses: mamba-org/setup-micromamba@v1 @@ -53,12 +38,8 @@ jobs: bash powershell - - name: Install extra Python dependencies - if: runner.os == 'Windows' - shell: bash -l {0} - run: | - pip install xmipy - pip install . + - name: Install FloPy + run: pip install . 
- name: Install Modflow-related executables uses: modflowpy/install-modflow-action@v1 @@ -69,25 +50,10 @@ jobs: repo: modflow6-nightly-build - name: Update FloPy packages - if: runner.os != 'Windows' - run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - - - name: Update FloPy packages - if: runner.os == 'Windows' - shell: bash -l {0} run: python -m flopy.mf6.utils.generate_classes --ref develop --no-backup - name: Run regression tests - if: runner.os != 'Windows' - working-directory: ./autotest - run: pytest -v -m="regression" -n=auto --durations=0 --keep-failed=.failed - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Run regression tests - if: runner.os == 'Windows' - shell: bash -l {0} - working-directory: ./autotest + working-directory: autotest run: pytest -v -m="regression" -n=auto --durations=0 --keep-failed=.failed env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -97,5 +63,4 @@ jobs: if: failure() with: name: failed-regression-${{ matrix.os }}-${{ matrix.python-version }} - path: | - ./autotest/.failed/** + path: autotest/.failed/** diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e130f3a337..ef60f3cda1 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -63,11 +63,13 @@ jobs: python -c "import flopy; print(f'FloPy version: {flopy.__version__}')" echo "version=${ver#"v"}" >> $GITHUB_OUTPUT - - name: Update FloPy packages + - name: Update FloPy package classes run: python -m flopy.mf6.utils.generate_classes --ref master --no-backup - - name: Lint Python files - run: python scripts/pull_request_prepare.py + - name: Format Python files + run: | + black -v flopy + isort -v flopy - name: Run tests working-directory: autotest @@ -80,8 +82,7 @@ jobs: if: failure() with: name: failed-${{ matrix.os }}-${{ matrix.python-version }} - path: | - ./autotest/.failed/** + path: autotest/.failed/** - name: Run notebooks working-directory: scripts diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index a41a6deb5a..0000000000 --- a/.pylintrc +++ /dev/null @@ -1,586 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. 
Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - C0330 - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. 
This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members=graph.*,requests.* - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. 
-ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - -# Good variable names which should always be accepted, separated by a comma -good-names=kv,nr,nc,sy,ss,iu,hy,hk - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. 
-min-similarity-lines=4 - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[STRING] - -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. -check-str-concat-over-line-jumps=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. 
This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a604641b65..1abb137f0b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -55,12 +55,14 @@ Before you submit your Pull Request (PR) consider the following guidelines: ``` 4. Create your patch, **including appropriate test cases**. See [DEVELOPER,md](DEVELOPER.md#running-tests) for guidelines for constructing autotests. -5. Run the [isort import sorter](https://github.com/PyCQA/isort) and [black formatter](https://github.com/psf/black). There is a utility script to do this in the `scripts` directory: +5. Run the formatting tools from the project root: ```shell - python ./scripts/pull_request_prepare.py + black -v flopy + isort -v flopy ``` - Note: Pull Requests must pass isort import and black format checks run on the [GitHub actions](https://github.com/modflowpy/flopy/actions) (*linting*) before they will be accepted. 
isort can be installed using [`pip`](https://pypi.org/project/isort/) and [`conda`](https://anaconda.org/conda-forge/isort). The black formatter can also be installed using [`pip`](https://pypi.org/project/black/) and [`conda`](https://anaconda.org/conda-forge/black). If the Pull Request fails the *linting* job in the [flopy continuous integration](https://github.com/modflowpy/flopy/actions/workflows/commit.yml) workflow, make sure the latest versions of isort and black are installed. + + Note: Pull Requests must pass format checks run on [GitHub Actions](https://github.com/modflowpy/flopy/actions) before they will be accepted. If the Pull Request fails the `lint` job in the [continuous integration](https://github.com/modflowpy/flopy/actions/workflows/commit.yml) workflow, make sure the latest versions of `black` and `isort` are installed (this may require clearing CI caches). 6. Run the full FloPy test suite and ensure that all tests pass: diff --git a/DEVELOPER.md b/DEVELOPER.md index cda40aa717..e097f21652 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -63,15 +63,15 @@ Then install `flopy` and core dependencies from the project root: pip install . -Alternatively, with Anaconda or Miniconda: - - conda env create -f etc/environment.yml - conda activate flopy - The `flopy` package has a number of [optional dependencies](.docs/optional_dependencies.md), as well as extra dependencies required for linting, testing, and building documentation. Extra dependencies are listed in the `test`, `lint`, `optional`, and `doc` groups under the `[project.optional-dependencies]` section in `pyproject.toml`. Core, linting, testing and optional dependencies are included in the Conda environment in `etc/environment.yml`. Only core dependencies are included in the PyPI package — to install extra dependency groups with pip, use `pip install ".[<group>]"`. For instance, to install all extra dependency groups: pip install ".[test, lint, optional, doc]" +Alternatively, with Anaconda or Miniconda: + + conda env create -f etc/environment.yml + conda activate flopy + #### Python IDEs ##### Visual Studio Code @@ -80,7 +80,7 @@ VSCode users on Windows may need to run `conda init`, then open a fresh terminal ```json { - "python.defaultInterpreterPath": "/path/to/your/virtual/environment", + "python.defaultInterpreterPath": "/path/to/environment", "python.terminal.activateEnvironment": true } ``` @@ -110,9 +110,9 @@ wget https://github.com/MODFLOW-USGS/executables/releases/download/8.0/linux.zip unzip linux.zip -d /path/to/your/install/location ``` -Then add the install location to your `PATH` +Then add the install location to the `PATH` - export PATH="/path/to/your/install/location:$PATH" + export PATH="/path/to/install/location:$PATH" ##### Mac diff --git a/autotest/conftest.py b/autotest/conftest.py index 6549d99e36..0db0d3ec08 100644 --- a/autotest/conftest.py +++ b/autotest/conftest.py @@ -106,6 +106,14 @@ def pytest_addoption(parser): "but automated tests should probably also check patch collections or figure & axis properties.)", ) + # for test_generate_classes.py + parser.addoption( + "--ref", + action="append", + type=str, + help="Include extra refs to test. Useful for testing branches on a fork, e.g. <your fork>
/modflow6/.", + ) + def pytest_report_header(config): """Header for pytest to show versions of packages.""" diff --git a/autotest/pytest.ini b/autotest/pytest.ini index 8f4616fd37..c94efd1f32 100644 --- a/autotest/pytest.ini +++ b/autotest/pytest.ini @@ -10,8 +10,9 @@ python_files = env_files = .env markers = - slow: tests that don't complete in a few seconds - example: exercise scripts, tutorials and notebooks - regression: tests that compare multiple results - meta: run by other tests (e.g. testing fixtures) - mf6: tests for the mf6 module \ No newline at end of file + example: exercise scripts, tutorials, notebooks + generation: tests for code generation utilities + meta: tests run by other tests + mf6: tests for MODFLOW 6 support + regression: tests comparing multiple versions + slow: tests not completing in a few seconds \ No newline at end of file diff --git a/autotest/test_generate_classes.py b/autotest/test_generate_classes.py new file mode 100644 index 0000000000..d2bd102350 --- /dev/null +++ b/autotest/test_generate_classes.py @@ -0,0 +1,149 @@ +import sys +from os import environ +from pathlib import Path +from pprint import pprint +from typing import Iterable +from warnings import warn + +import pytest +from modflow_devtools.misc import get_current_branch, run_cmd +from virtualenv import cli_run + +branch = get_current_branch() + + +def nonempty(itr: Iterable): + for x in itr: + if x: + yield x + + +def pytest_generate_tests(metafunc): + # defaults + owner = "MODFLOW-USGS" + repo = "modflow6" + ref = [ + f"{owner}/{repo}/develop", + f"{owner}/{repo}/master", + f"{owner}/{repo}/6.4.1", + f"{owner}/{repo}/4458f9f", + f"{owner}/{repo}/4458f9f7a6244182e6acc2430a6996f9ca2df367", + ] + + # refs provided as env vars override the defaults + ref_env = environ.get("TEST_GENERATE_CLASSES_REF") + if ref_env: + ref = nonempty(ref_env.strip().split(",")) + + # refs given as CLI options override everything + ref_opt = metafunc.config.getoption("--ref") + if ref_opt: + ref = nonempty([o.strip() for o in ref_opt]) + + # drop duplicates + ref = list(dict.fromkeys(ref)) + + # drop and warn refs with invalid format + # i.e. 
not "owner/repo/branch" + for r in ref: + spl = r.split("/") + if len(spl) != 3 or not all(spl): + warn(f"Skipping invalid ref: {r}") + ref.remove(r) + + key = "ref" + if key in metafunc.fixturenames: + metafunc.parametrize(key, ref, scope="session") + + +@pytest.mark.generation +@pytest.mark.mf6 +@pytest.mark.slow +@pytest.mark.skipif( + branch == "master" or branch.startswith("v"), + reason="skip on master and release branches", +) +def test_generate_classes_from_github_refs( + request, project_root_path, ref, worker_id, function_tmpdir +): + # skip if run in parallel with pytest-xdist + argv = ( + request.config.workerinput["mainargv"] + if hasattr(request.config, "workerinput") + else [] + ) + if worker_id != "master" and "loadfile" not in argv: + pytest.skip("can't run in parallel") + + # create virtual environment + venv = function_tmpdir / "venv" + python = venv / "bin" / "python" + pip = venv / "bin" / "pip" + cli_run([str(venv)]) + print(f"Using temp venv at {venv} to test class generation from {ref}") + + # install flopy and dependencies + deps = [str(project_root_path), "modflow-devtools"] + for dep in deps: + out, err, ret = run_cmd(str(pip), "install", dep, verbose=True) + assert not ret, out + err + + # get creation time of files + flopy_path = ( + venv + / "lib" + / f"python{sys.version_info.major}.{sys.version_info.minor}" + / "site-packages" + / "flopy" + ) + assert flopy_path.is_dir() + mod_files = list((flopy_path / "mf6" / "modflow").rglob("*")) + list( + (flopy_path / "mf6" / "data" / "dfn").rglob("*") + ) + mod_file_times = [Path(mod_file).stat().st_mtime for mod_file in mod_files] + pprint(mod_files) + + # split ref into owner, repo, ref name + spl = ref.split("/") + owner = spl[0] + repo = spl[1] + ref = spl[2] + + # generate classes + out, err, ret = run_cmd( + str(python), + "-m", + "flopy.mf6.utils.generate_classes", + "--owner", + owner, + "--repo", + repo, + "--ref", + ref, + "--no-backup", + verbose=True, + ) + assert not ret, out + err + + def get_mtime(f): + try: + return Path(f).stat().st_mtime + except: + return 0 # if file not found + + # make sure files were regenerated + modified_files = [ + mod_files[i] + for i, (before, after) in enumerate( + zip( + mod_file_times, + [get_mtime(f) for f in mod_files], + ) + ) + if after > 0 and after > before + ] + assert any(modified_files) + print(f"{len(modified_files)} files were modified:") + pprint(modified_files) + + # todo checkout mf6 and test with dfnpath? test with backups? diff --git a/docs/make_release.md b/docs/make_release.md index 0da0f17038..5f7ee60305 100644 --- a/docs/make_release.md +++ b/docs/make_release.md @@ -98,7 +98,7 @@ As described above, making a release manually involves the following steps: - Update MODFLOW 6 dfn files in the repository and MODFLOW 6 package classes by running `python -m flopy.mf6.utils.generate_classes --ref master --no-backup` -- Run `isort` and `black` on the `flopy` module. This can be achieved by running `python scripts/pull_request_prepare.py` from the project root. The commands `isort .` and `black .` can also be run individually instead. +- Format Python source files. This can be achieved by running `black` and `isort` on the `flopy` module. - Use `run_notebooks.py` in the `scripts` directory to rerun all notebooks in `.docs/Notebooks`. @@ -121,7 +121,7 @@ As described above, making a release manually involves the following steps: 2. Set the development version as appropriate: `python scripts/update_version.py -v `. 
The version number must comply with [PEP 440](https://peps.python.org/pep-0440/). -3. Lint Python files: `python scripts/pull_request_prepare.py` +3. Format Python files by running `black` and `isort` on the `flopy` module. 4. Commit and push the updated `develop` branch. diff --git a/etc/environment.yml b/etc/environment.yml index 592e4cad0f..f49c11f8e5 100644 --- a/etc/environment.yml +++ b/etc/environment.yml @@ -9,6 +9,7 @@ dependencies: # lint - black + - cffconvert - flake8 - isort - pylint @@ -19,9 +20,7 @@ dependencies: - filelock - jupyter - jupytext - - pip - - pip: - - modflow-devtools + - modflow-devtools - pytest - pytest-benchmark - pytest-cases @@ -49,6 +48,4 @@ dependencies: - pyvista - imageio - pymetis - - # MODFLOW API dependencies - xmipy diff --git a/examples/data/mf6/test006_2models_mvr/model1.dis b/examples/data/mf6/test006_2models_mvr/model1.dis index 3e5c3eaa5e..07637f87e5 100644 --- a/examples/data/mf6/test006_2models_mvr/model1.dis +++ b/examples/data/mf6/test006_2models_mvr/model1.dis @@ -11,7 +11,7 @@ end dimensions BEGIN GRIDDATA IDOMAIN - INTERNAL FACTOR 1 IPRN + INTERNAL FACTOR 1 IPRN 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 1 1 diff --git a/examples/data/mf6/test006_2models_mvr/model1.ic b/examples/data/mf6/test006_2models_mvr/model1.ic index 841d89fc74..b1069687b6 100644 --- a/examples/data/mf6/test006_2models_mvr/model1.ic +++ b/examples/data/mf6/test006_2models_mvr/model1.ic @@ -4,7 +4,7 @@ end options BEGIN GRIDDATA strt -INTERNAL FACTOR 1.0 IPRN +INTERNAL FACTOR 1.0 IPRN 0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 diff --git a/examples/data/mf6/test006_gwf3/flow.ic b/examples/data/mf6/test006_gwf3/flow.ic index 3d1b93d931..ce4024e3fa 100644 --- a/examples/data/mf6/test006_gwf3/flow.ic +++ b/examples/data/mf6/test006_gwf3/flow.ic @@ -4,7 +4,7 @@ end options BEGIN GRIDDATA strt -INTERNAL FACTOR 1 IPRN +INTERNAL FACTOR 1 IPRN 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 diff --git a/pyproject.toml b/pyproject.toml index 7acafa0b3a..bb5ebb487b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering :: Hydrology", ] requires-python = ">=3.8" @@ -120,4 +121,4 @@ target_version = ["py38"] [tool.isort] profile = "black" src_paths = ["flopy"] -line_length = 79 +line_length = 79 \ No newline at end of file diff --git a/scripts/README.md b/scripts/README.md index e64f9c7284..85e362910e 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -29,14 +29,6 @@ For instance, `e689af57e7439b9005749d806248897ad550eab5_20150811_041632_uncommit **Note**: the `process_benchmarks.py` script depends on `seaborn`, which is not included as a dependency in either `etc/environment.yml` or in any of the optional groups in `pyproject.toml`, since this is the only place it is used in this repository. -## Preparing for PRs - -The `pull_request_prepare.py` script lints Python source code files by running `black` and `isort` on the `flopy` subdirectory. This script should be run before opening a pull request, as CI will fail if the code is not properly formatted. For instance, from the project root: - -```shell -python scripts/pull_request_prepare.py -``` - ## Running notebooks The `run_notebooks.py` script runs notebooks located in the `.docs/Notebooks` directory. 
diff --git a/scripts/pull_request_prepare.py b/scripts/pull_request_prepare.py deleted file mode 100644 index af663f7f00..0000000000 --- a/scripts/pull_request_prepare.py +++ /dev/null @@ -1,21 +0,0 @@ -import os - -try: - import isort - - print(f"isort version: {isort.__version__}") -except ModuleNotFoundError: - print("isort not installed\n\tInstall using pip install isort") - -try: - import black - - print(f"black version: {black.__version__}") -except ModuleNotFoundError: - print("black not installed\n\tInstall using pip install black") - -print("running isort...") -os.system("isort -v ../flopy") - -print("running black...") -os.system("black -v ../flopy")
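
The hunks above drop `scripts/pull_request_prepare.py` in favor of direct formatter invocations and introduce a `generation` test marker plus a `--ref` option (or `TEST_GENERATE_CLASSES_REF` variable) for `autotest/test_generate_classes.py`. A minimal usage sketch follows, assuming a local checkout with these changes applied and the `test` extras installed; the `MODFLOW-USGS/modflow6/develop` ref is only an illustrative value:

```shell
# format the flopy package in place (replaces scripts/pull_request_prepare.py)
pip install black isort
black -v flopy
isort -v flopy

# run only the new class-generation tests against a specific MODFLOW 6 ref;
# --ref expects owner/repo/ref and may be passed more than once
cd autotest
pytest -v -m "generation" test_generate_classes.py --ref MODFLOW-USGS/modflow6/develop

# refs can also be supplied as a comma-separated environment variable
TEST_GENERATE_CLASSES_REF="MODFLOW-USGS/modflow6/develop" pytest -v -m "generation" test_generate_classes.py
```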