Remove unused imports #567

Open · wants to merge 1 commit into base: main
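How a cleanup like this is usually produced is not stated in the PR: linters such as flake8 or ruff typically report unused imports (error code F401), and tools like autoflake can remove them automatically. As a rough, self-contained illustration of the same check, not the tool used for this change, the sketch below uses Python's ast module to compare the names a file imports against the names it actually loads. It ignores corner cases a real linter handles (star imports, __all__ re-exports, names used only in string annotations).

import ast
import sys


def unused_imports(source):
  """Return (line, name) pairs for imports that are never loaded in source."""
  tree = ast.parse(source)
  imported = {}  # bound name -> line where it was imported
  for node in ast.walk(tree):
    if isinstance(node, ast.Import):
      for alias in node.names:
        # "import jax.numpy" binds the top-level name "jax".
        imported[alias.asname or alias.name.split(".")[0]] = node.lineno
    elif isinstance(node, ast.ImportFrom):
      for alias in node.names:
        if alias.name != "*":
          imported[alias.asname or alias.name] = node.lineno
  used = {node.id for node in ast.walk(tree)
          if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load)}
  return sorted((line, name) for name, line in imported.items() if name not in used)


if __name__ == "__main__":
  for path in sys.argv[1:]:
    with open(path) as f:
      for line, name in unused_imports(f.read()):
        print("%s:%d: '%s' imported but unused" % (path, line, name))

A real linter remains the better tool for the corner cases above; the sketch only illustrates what "unused" means in the diffs that follow.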
benchmarks/lbfgs_benchmark.py (2 changes: 0 additions & 2 deletions)
@@ -14,14 +14,12 @@

"""Benchmark LBFGS implementation."""

import time

from absl import app
from absl import flags

from sklearn import datasets

import jax
import jax.numpy as jnp
import jaxopt

benchmarks/proximal_gradient_benchmark.py (1 change: 0 additions & 1 deletion)
@@ -16,7 +16,6 @@

import time

from typing import NamedTuple
from typing import Sequence

from absl import app
examples/deep_learning/plot_sgd_solvers.py (2 changes: 0 additions & 2 deletions)
@@ -38,7 +38,6 @@

from absl import flags

import logging
import sys
from timeit import default_timer as timer

@@ -49,7 +48,6 @@
from jaxopt import ArmijoSGD
from jaxopt import PolyakSGD
from jaxopt import OptaxSolver
from jaxopt.tree_util import tree_l2_norm, tree_sub

import optax
from flax import linen as nn
examples/fixed_point/deep_equilibrium_model.py (6 changes: 1 addition & 5 deletions)
@@ -34,19 +34,16 @@
"""

from functools import partial
from typing import Any, Mapping, Tuple, Callable
from typing import Any, Tuple, Callable

from absl import app
from absl import flags

import flax
from flax import linen as nn

import jax
import jax.numpy as jnp
from jax.tree_util import tree_structure

import jaxopt
from jaxopt import loss
from jaxopt import OptaxSolver
from jaxopt import FixedPointIteration
@@ -58,7 +55,6 @@

import tensorflow_datasets as tfds
import tensorflow as tf
from collections import namedtuple


dataset_names = [
examples/fixed_point/plot_picard_ode.py (3 changes: 0 additions & 3 deletions)
@@ -58,12 +58,9 @@

from jaxopt import AndersonAcceleration

from jaxopt import objective
from jaxopt.tree_util import tree_scalar_mul, tree_sub

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from matplotlib.pyplot import cm
import scipy.integrate

examples/implicit_diff/lasso_implicit_diff.py (1 change: 0 additions & 1 deletion)
@@ -20,7 +20,6 @@
from absl import app
from absl import flags

import jax
import jax.numpy as jnp

from jaxopt import BlockCoordinateDescent
jaxopt/_src/anderson.py (9 changes: 3 additions & 6 deletions)
@@ -17,7 +17,6 @@
from typing import Any
from typing import Callable
from typing import NamedTuple
from typing import List
from typing import Union

from typing import Optional
@@ -27,12 +26,10 @@
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt._src import linear_solve
from jaxopt._src.tree_util import tree_l2_norm, tree_sub
from jaxopt._src.tree_util import tree_vdot, tree_add
from jaxopt._src.tree_util import tree_mul, tree_scalar_mul
from jaxopt._src.tree_util import tree_sub
from jaxopt._src.tree_util import tree_vdot
from jaxopt._src.tree_util import tree_average, tree_add_scalar_mul
from jaxopt._src.tree_util import tree_map, tree_gram
from jaxopt._src.tree_util import tree_map


def minimize_residuals(residual_gram, ridge):
jaxopt/_src/anderson_wrapper.py (3 changes: 1 addition & 2 deletions)
@@ -26,8 +26,7 @@
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt._src.tree_util import tree_l2_norm, tree_sub, tree_map
from jaxopt._src.anderson import AndersonAcceleration
from jaxopt._src.tree_util import tree_sub, tree_map
from jaxopt._src.anderson import anderson_step, update_history


jaxopt/_src/backtracking_linesearch.py (1 change: 0 additions & 1 deletion)
@@ -22,7 +22,6 @@

from dataclasses import dataclass

import jax
import jax.numpy as jnp

from jaxopt._src import base
jaxopt/_src/base.py (1 change: 0 additions & 1 deletion)
@@ -35,7 +35,6 @@
# jaxopt._src.linear_solve instead.
# This allows to define linear solver with base.Solver interface,
# and then exporting them in jaxopt.linear_solve.
from jaxopt._src import linear_solve

from jaxopt import loop
from jaxopt import tree_util
jaxopt/_src/bisection.py (2 changes: 0 additions & 2 deletions)
@@ -25,8 +25,6 @@
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt._src import implicit_diff as idf
from jaxopt._src import loop


class BisectionState(NamedTuple):
jaxopt/_src/block_cd.py (3 changes: 0 additions & 3 deletions)
@@ -28,10 +28,7 @@
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt._src import implicit_diff as idf
from jaxopt._src import loop
from jaxopt._src import objective
from jaxopt._src import tree_util


class BlockCDState(NamedTuple):
jaxopt/_src/cd_qp.py (1 change: 0 additions & 1 deletion)
@@ -26,7 +26,6 @@

from jaxopt._src import base
from jaxopt._src import projection
from jaxopt._src import tree_util


class BoxCDQPState(NamedTuple):
jaxopt/_src/cvxpy_wrapper.py (4 changes: 0 additions & 4 deletions)
@@ -13,18 +13,14 @@
# limitations under the License.
"""CVXPY wrappers."""

from typing import Any
from typing import Callable
from typing import Optional
from typing import Tuple

from dataclasses import dataclass

import jax
import jax.numpy as jnp
from jaxopt._src import base
from jaxopt._src import implicit_diff as idf
from jaxopt._src import linear_solve
from jaxopt._src import tree_util


jaxopt/_src/fixed_point_iteration.py (1 change: 0 additions & 1 deletion)
@@ -23,7 +23,6 @@
from dataclasses import dataclass

import jax.numpy as jnp
from jax.tree_util import tree_leaves, tree_structure

from jaxopt._src import base
from jaxopt._src.tree_util import tree_l2_norm, tree_sub
jaxopt/_src/gradient_descent.py (2 changes: 0 additions & 2 deletions)
@@ -15,9 +15,7 @@
"""Implementation of gradient descent in JAX."""

from typing import Any
from typing import Callable
from typing import NamedTuple
from typing import Union

from dataclasses import dataclass

jaxopt/_src/iterative_refinement.py (6 changes: 1 addition & 5 deletions)
@@ -28,15 +28,11 @@
from dataclasses import dataclass
from functools import partial

import jax
import jax.numpy as jnp

from jaxopt._src import loop
from jaxopt._src import base
from jaxopt._src import implicit_diff as idf
from jaxopt._src.tree_util import tree_zeros_like, tree_add, tree_sub
from jaxopt._src.tree_util import tree_add_scalar_mul, tree_scalar_mul
from jaxopt._src.tree_util import tree_vdot, tree_negative, tree_l2_norm
from jaxopt._src.tree_util import tree_l2_norm
from jaxopt._src.linear_operator import _make_linear_operator
import jaxopt._src.linear_solve as linear_solve

jaxopt/_src/lbfgs.py (1 change: 0 additions & 1 deletion)
@@ -17,7 +17,6 @@
import warnings

from dataclasses import dataclass
from functools import partial
from typing import Any, Callable, NamedTuple, Optional, Union

import jax
jaxopt/_src/levenberg_marquardt.py (3 changes: 1 addition & 2 deletions)
@@ -14,7 +14,6 @@

"""Levenberg-Marquardt algorithm in JAX."""

import math
from typing import Any
from typing import Callable
from typing import Literal
@@ -35,7 +34,7 @@
from jaxopt._src.linear_solve import solve_inv
from jaxopt._src.linear_solve import solve_lu
from jaxopt._src.linear_solve import solve_qr
from jaxopt._src.tree_util import tree_l2_norm, tree_inf_norm, tree_sub, tree_add, tree_mul
from jaxopt._src.tree_util import tree_l2_norm, tree_inf_norm


class LevenbergMarquardtState(NamedTuple):
jaxopt/_src/linear_operator.py (3 changes: 1 addition & 2 deletions)
@@ -16,9 +16,8 @@
import functools
import jax
import jax.numpy as jnp
import numpy as onp

from jaxopt.tree_util import tree_map, tree_sum, tree_mul
from jaxopt.tree_util import tree_map


class DenseLinearOperator:
jaxopt/_src/nonlinear_cg.py (1 change: 0 additions & 1 deletion)
@@ -25,7 +25,6 @@
from jaxopt._src.linesearch_util import _setup_linesearch
from jaxopt._src.tree_util import tree_single_dtype, get_real_dtype
from jaxopt.tree_util import tree_add_scalar_mul
from jaxopt.tree_util import tree_div
from jaxopt.tree_util import tree_l2_norm
from jaxopt.tree_util import tree_scalar_mul
from jaxopt.tree_util import tree_sub
jaxopt/_src/osqp.py (3 changes: 1 addition & 2 deletions)
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""GPU-friendly implementation of OSQP."""
from abc import ABC, abstractmethod
from abc import abstractmethod
from dataclasses import dataclass
from functools import partial

@@ -24,7 +24,6 @@
from typing import Union

import jax
import jax.nn as nn
import jax.numpy as jnp
from jax.tree_util import tree_reduce

jaxopt/_src/scipy_wrappers.py (2 changes: 0 additions & 2 deletions)
@@ -21,8 +21,6 @@
# currently only ScipyMinimize exposes this option.
"""

import abc
import dataclasses
from dataclasses import dataclass
from typing import Any
from typing import Callable
jaxopt/_src/test_util.py (3 changes: 0 additions & 3 deletions)
@@ -14,16 +14,13 @@

"""Test utilities."""

from absl.testing import absltest
from absl.testing import parameterized

import functools

import jax
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt._src import loss

import numpy as onp
import scipy as osp
jaxopt/_src/zoom_linesearch.py (1 change: 0 additions & 1 deletion)
@@ -34,7 +34,6 @@
from jaxopt.tree_util import tree_scalar_mul
from jaxopt.tree_util import tree_vdot_real
from jaxopt.tree_util import tree_conj
from jaxopt.tree_util import tree_l2_norm

# pylint: disable=g-bare-generic
# pylint: disable=invalid-name
tests/anderson_test.py (6 changes: 1 addition & 5 deletions)
@@ -19,18 +19,14 @@
import jax
import jax.numpy as jnp
from jax import scipy as jsp
from jax.tree_util import tree_map, tree_all
from jax.test_util import check_grads

import jaxopt
from jaxopt.tree_util import tree_l2_norm, tree_scalar_mul
from jaxopt._src.tree_util import tree_average, tree_sub
from jaxopt import objective
from jaxopt.tree_util import tree_l2_norm
from jaxopt import AndersonAcceleration
from jaxopt._src import test_util

import numpy as onp
from sklearn import datasets


class AndersonAccelerationTest(test_util.JaxoptTestCase):
tests/anderson_wrapper_test.py (9 changes: 0 additions & 9 deletions)
@@ -14,31 +14,22 @@


from absl.testing import absltest
from absl.testing import parameterized

import jax
import jax.numpy as jnp
from jax import config
from jax.tree_util import tree_map, tree_all
from jax.test_util import check_grads
import optax

from jaxopt.tree_util import tree_l2_norm, tree_scalar_mul, tree_sub
from jaxopt import objective

from jaxopt import projection
from jaxopt import prox
from jaxopt._src import test_util

from jaxopt import AndersonWrapper
from jaxopt import BlockCoordinateDescent
from jaxopt import GradientDescent
from jaxopt import OptaxSolver
from jaxopt import PolyakSGD
from jaxopt import ProximalGradient

import numpy as onp
import scipy
from sklearn import datasets


tests/base_test.py (7 changes: 0 additions & 7 deletions)
@@ -25,20 +25,13 @@
from typing import Any
from typing import Callable
from typing import NamedTuple
from typing import Optional

import dataclasses

import jax
import jax.numpy as jnp

from jaxopt._src import base
from jaxopt.tree_util import tree_add
from jaxopt.tree_util import tree_add_scalar_mul
from jaxopt.tree_util import tree_l2_norm
from jaxopt.tree_util import tree_scalar_mul
from jaxopt.tree_util import tree_sub
from jaxopt.tree_util import tree_zeros_like


class DummySolverState(NamedTuple):
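As a final sanity check on the deletions (a suggestion, not part of the PR): re-importing every touched library module catches broken module-level code and broken re-exports, while usage inside functions is covered by the regular test suite. The module names below are derived from the jaxopt/_src file paths in this diff, assuming jaxopt and its test dependencies are installed; the cvxpy wrapper is left out because it pulls in the optional cvxpy package.

# Smoke check: re-import the jaxopt modules touched by this PR.
# Only module-level breakage and broken re-exports show up here;
# run the test suite for everything else.
import importlib

TOUCHED = [
  "jaxopt._src.anderson",
  "jaxopt._src.anderson_wrapper",
  "jaxopt._src.backtracking_linesearch",
  "jaxopt._src.base",
  "jaxopt._src.bisection",
  "jaxopt._src.block_cd",
  "jaxopt._src.cd_qp",
  "jaxopt._src.fixed_point_iteration",
  "jaxopt._src.gradient_descent",
  "jaxopt._src.iterative_refinement",
  "jaxopt._src.lbfgs",
  "jaxopt._src.levenberg_marquardt",
  "jaxopt._src.linear_operator",
  "jaxopt._src.nonlinear_cg",
  "jaxopt._src.osqp",
  "jaxopt._src.scipy_wrappers",
  "jaxopt._src.test_util",
  "jaxopt._src.zoom_linesearch",
]

for name in TOUCHED:
  importlib.import_module(name)  # raises if the module no longer imports cleanly
  print("ok:", name)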