feat(trapping): Enable merging trapping and user constraints
Also:
* (constrained_sr3) Clean up creation of cvxpy constraints
* (constrained_sr3) Clone the cvxpy problem before solving, so the except
  branches retry on a fresh copy, because prob.solve() mutates prob in a
  mathematically significant way
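
The mutation noted above is the motivation for the prob_clone logic in the
diff below. A minimal sketch of the clone-before-retry pattern, assuming only
cvxpy and numpy (the toy problem and solver kwargs here are illustrative, not
pysindy's actual code):

from copy import deepcopy

import cvxpy as cp
import numpy as np

xi = cp.Variable(3)
cost = cp.sum_squares(np.eye(3) @ xi - np.ones(3))
prob = cp.Problem(cp.Minimize(cost))

# prob.solve() attaches solver state to prob, so keep a pristine copy
# before the first attempt and retry on the copy, not the mutated object.
prob_clone = deepcopy(prob)
try:
    prob.solve(max_iter=10_000)  # kwarg acceptance is solver-dependent
except (TypeError, ValueError):
    prob = prob_clone
    prob.solve()  # fall back to default solver arguments
coefs = prob.variables()[0].value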
Jacob-Stevens-Haas committed Jan 10, 2024
1 parent e0f723b commit c8339c0
Showing 3 changed files with 95 additions and 55 deletions.
63 changes: 36 additions & 27 deletions pysindy/optimizers/constrained_sr3.py
@@ -1,4 +1,6 @@
import warnings
from copy import deepcopy
from typing import Optional
from typing import Tuple

try:
@@ -168,7 +170,7 @@ def __init__(
thresholds=None,
equality_constraints=False,
inequality_constraints=False,
constraint_separation_index=0,
constraint_separation_index: Optional[int] = None,
verbose=False,
verbose_cvxpy=False,
unbias=False,
@@ -194,7 +196,7 @@ def __init__(
self.constraint_lhs = constraint_lhs
self.constraint_rhs = constraint_rhs
self.constraint_order = constraint_order
self.use_constraints = (constraint_lhs is not None) and (
self.use_constraints = (constraint_lhs is not None) or (
constraint_rhs is not None
)

@@ -208,7 +210,7 @@ def __init__(
" but user did not specify if the constraints were equality or"
" inequality constraints. Assuming equality constraints."
)
self.equality_constraints = True
equality_constraints = True

if self.use_constraints:
if constraint_order not in ("feature", "target"):
@@ -243,6 +245,16 @@ def __init__(
)
self.inequality_constraints = inequality_constraints
self.equality_constraints = equality_constraints
if self.use_constraints and constraint_separation_index is None:
if self.inequality_constraints and not self.equality_constraints:
constraint_separation_index = len(constraint_lhs)
elif self.equality_constraints and not self.inequality_constraints:
constraint_separation_index = 0
else:
raise ValueError(
"If passing both inequality and equality constraints, must specify"
" constraint_separation_index."
)
self.constraint_separation_index = constraint_separation_index

def _update_full_coef_constraints(self, H, x_transpose_y, coef_sparse):
@@ -276,30 +288,20 @@ def _create_var_and_part_cost(

def _update_coef_cvxpy(self, xi, cost, var_len, coef_prev, tol):
if self.use_constraints:
if self.inequality_constraints and self.equality_constraints:
# Process inequality constraints then equality constraints
prob = cp.Problem(
cp.Minimize(cost),
[
self.constraint_lhs[: self.constraint_separation_index, :] @ xi
<= self.constraint_rhs[: self.constraint_separation_index],
self.constraint_lhs[self.constraint_separation_index :, :] @ xi
== self.constraint_rhs[self.constraint_separation_index :],
],
constraints = []
if self.equality_constraints:
constraints.append(
self.constraint_lhs[self.constraint_separation_index :, :] @ xi
== self.constraint_rhs[self.constraint_separation_index :],
)
elif self.inequality_constraints:
prob = cp.Problem(
cp.Minimize(cost),
[self.constraint_lhs @ xi <= self.constraint_rhs],
)
else:
prob = cp.Problem(
cp.Minimize(cost),
[self.constraint_lhs @ xi == self.constraint_rhs],
if self.inequality_constraints:
constraints.append(
self.constraint_lhs[: self.constraint_separation_index, :] @ xi
<= self.constraint_rhs[: self.constraint_separation_index]
)
else:
prob = cp.Problem(cp.Minimize(cost))
prob = cp.Problem(cp.Minimize(cost), constraints)

prob_clone = deepcopy(prob)
# default solver is SCS/OSQP here but switches to ECOS for L2
try:
prob.solve(
@@ -313,13 +315,20 @@ def _update_coef_cvxpy(self, xi, cost, var_len, coef_prev, tol):
# similar semantic changes for the other variables.
except (TypeError, ValueError):
try:
prob = prob_clone
prob.solve(max_iters=self.max_iter, verbose=self.verbose_cvxpy)
xi = prob.variables()[0]
except cp.error.SolverError:
print("Solver failed, setting coefs to zeros")
warnings.warn("Solver failed, setting coefs to zeros")
xi.value = np.zeros(var_len)
except cp.error.SolverError:
print("Solver failed, setting coefs to zeros")
xi.value = np.zeros(var_len)
try:
prob = prob_clone
prob.solve(max_iter=self.max_iter, verbose=self.verbose_cvxpy)
xi = prob.variables()[0]
except cp.error.SolverError:
warnings.warn("Solver failed, setting coefs to zeros")
xi.value = np.zeros(var_len)

if xi.value is None:
warnings.warn(
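For reference, the merged-constraint convention this file now settles on:
rows of constraint_lhs before constraint_separation_index are inequality
("<=") rows, and rows after it are equality ("==") rows. A hedged sketch of
how a caller might assemble such a system (shapes and values invented for
illustration):

import numpy as np

from pysindy.optimizers import ConstrainedSR3

var_len = 6  # n_features * n_targets, length of the flattened coefficients
ineq_lhs, ineq_rhs = np.zeros((2, var_len)), np.zeros(2)
eq_lhs, eq_rhs = np.zeros((3, var_len)), np.zeros(3)

# Stack inequalities first, then equalities, and record the split point.
constraint_lhs = np.vstack([ineq_lhs, eq_lhs])
constraint_rhs = np.concatenate([ineq_rhs, eq_rhs])

opt = ConstrainedSR3(
    thresholder="l1",  # inequality constraints need a cvxpy-capable regularizer
    constraint_lhs=constraint_lhs,
    constraint_rhs=constraint_rhs,
    inequality_constraints=True,
    equality_constraints=True,
    constraint_separation_index=len(ineq_lhs),  # rows [:2] "<=", rows [2:] "=="
)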
33 changes: 24 additions & 9 deletions pysindy/optimizers/trapping_sr3.py
@@ -163,7 +164,8 @@ def __init__(
self,
*,
_n_tgts: int = None,
_include_bias: bool = True,
_include_bias: bool = False,
_interaction_only: bool = False,
eta: Union[float, None] = None,
eps_solver: float = 1e-7,
relax_optim: bool = True,
@@ -181,20 +182,30 @@ def __init__(
# _reduce/fit (). The following is a hack until we refactor how
# constraints are applied in ConstrainedSR3 and MIOSR
self._include_bias = _include_bias
self._interaction_only = _interaction_only
self._n_tgts = _n_tgts
if _n_tgts is None:
warnings.warn(
"Trapping Optimizer initialized without _n_tgts. It will likely"
" be unable to fit data"
)
_n_tgts = 1
constraint_separation_index = kwargs.get("constraint_separation_index", 0)
if _include_bias:
raise ValueError(
"Currently not able to include bias until PQ matrices are modified"
)
if "constraint_separation_index" in kwargs:
constraint_separation_index = kwargs["constraint_separation_index"]
elif kwargs.get("inequality_constraints", False):
constraint_separation_index = kwargs["constraint_lhs"].shape[0]
else:
constraint_separation_index = 0
constraint_rhs, constraint_lhs = _make_constraints(
_n_tgts, include_bias=_include_bias
)
constraint_order = kwargs.get("constraint_order", "feature")
constraint_order = kwargs.pop("constraint_order", "feature")
if constraint_order == "target":
constraint_lhs = np.reshape(np.transpose(constraint_lhs, [1, 0, 2]))
constraint_lhs = np.transpose(constraint_lhs, [0, 2, 1])
constraint_lhs = np.reshape(constraint_lhs, (constraint_lhs.shape[0], -1))
try:
constraint_lhs = np.concatenate(
@@ -460,22 +471,26 @@ def _reduce(self, x, y):
self.PW_history_ = []
self.PWeigs_history_ = []
self.history_ = []
n_samples, n_features = x.shape
n_tgts = y.shape[1]
n_samples, n_tgts = y.shape
n_features = n_poly_features(
n_tgts,
2,
include_bias=self._include_bias,
interaction_only=self._interaction_only,
)
var_len = n_features * n_tgts
n_feat_expected = int((n_tgts**2 + 3 * n_tgts) / 2.0)

# Only relevant if the stability term is turned on.
self.PL_unsym_, self.PL_, self.PQ_ = self._set_Ptensors(n_tgts)
# make sure dimensions/symmetries are correct
self.PL_, self.PQ_ = self._check_P_matrix(
n_tgts, n_features, n_feat_expected, self.PL_, self.PQ_
n_tgts, n_features, n_features, self.PL_, self.PQ_
)

# Set initial coefficients
if self.use_constraints and self.constraint_order.lower() == "target":
self.constraint_lhs = reorder_constraints(
self.constraint_lhs, n_features, output_order="target"
self.constraint_lhs, n_features, output_order="feature"
)
coef_sparse = self.coef_.T

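The _reduce refactor above sizes the optimization from the targets alone:
n_features is now computed by n_poly_features for the quadratic, no-bias
library rather than read off x.shape, and it agrees with the closed form the
old n_feat_expected used. A quick standalone check of that identity:

from math import comb

def quadratic_feature_count(n_tgts: int) -> int:
    # linear terms + pairwise cross terms + squares, no bias column
    return n_tgts + comb(n_tgts, 2) + n_tgts

for n in range(1, 10):
    assert quadratic_feature_count(n) == (n**2 + 3 * n) // 2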
54 changes: 35 additions & 19 deletions test/test_optimizers.py
@@ -74,8 +74,8 @@ def _align_optimizer_and_1dfeatures(
) -> tuple[BaseOptimizer, NDArray]:
# This is a hack until constraints are moved from init to fit
if isinstance(opt, TrappingSR3):
opt = TrappingSR3(_n_tgts=1, _include_bias=True)
features = np.hstack([features, features, features])
opt = TrappingSR3(_n_tgts=1, _include_bias=False)
features = np.hstack([features, features])
else:
features = features
return opt, features
@@ -435,6 +435,7 @@ def test_constrained_sr3_quadratic_library(params):
dict(thresholder="l2", threshold=1, expected=1.5),
dict(thresholder="weighted_l2", thresholds=np.ones((4, 1)), expected=2.5),
],
ids=lambda d: d["thresholder"],
)
def test_stable_linear_sr3_cost_function(params):
expected = params.pop("expected")
@@ -773,32 +774,35 @@ def test_fit_warn(data_derivative_1d, optimizer):
"optimizer",
[
(ConstrainedSR3, {"max_iter": 80}),
(TrappingSR3, {"_n_tgts": 5, "max_iter": 100}),
(TrappingSR3, {"_n_tgts": 3, "max_iter": 100, "eps_solver": 1e-5}),
(MIOSR, {}),
],
ids=lambda param: param[0].__name__ + " " + ",".join([key for key in param[1]]),
)
@pytest.mark.parametrize("target_value", [0, -1, 3])
def test_row_format_constraints(data_linear_combination, optimizer, target_value):
def test_feature_format_constraints(data_linear_combination, optimizer, target_value):
# Solution is x_dot = x.dot(np.array([[1, 1, 0], [0, 1, 1]]))
x, x_dot = data_linear_combination
x, y = data_linear_combination

constraint_rhs = target_value * np.ones(2)
constraint_lhs = np.zeros((2, x.shape[1] * x_dot.shape[1]))
constraint_lhs = np.zeros((2, x.shape[1], y.shape[1]))

# Should force corresponding entries of coef_ to be target_value
constraint_lhs[0, 0] = 1
constraint_lhs[1, 3] = 1
constraint_lhs[0, 1, 1] = 1
constraint_lhs[1, 2, 2] = 1
# reshape to "feature" order
constraint_lhs = np.reshape(constraint_lhs, (constraint_lhs.shape[0], -1))

model = optimizer[0](
constraint_lhs=constraint_lhs,
constraint_rhs=constraint_rhs,
constraint_order="feature",
**optimizer[1],
)
model.fit(x, x_dot)
model.fit(x, y)

np.testing.assert_allclose(
np.array([model.coef_[0, 0], model.coef_[1, 1]]), target_value, atol=1e-8
np.array([model.coef_[1, 1], model.coef_[2, 2]]), target_value, atol=1e-7
)


@@ -807,26 +811,37 @@ def test_row_format_constraints(data_linear_combination, optimizer, target_value
[
(ConstrainedSR3, {"max_iter": 80}),
(StableLinearSR3, {}),
(TrappingSR3, {"max_iter": 100}),
(TrappingSR3, {"_n_tgts": 3, "max_iter": 200, "eps_solver": 1e-5}),
(MIOSR, {}),
],
ids=lambda param: param[0].__name__ + " " + ",".join([key for key in param[1]]),
)
@pytest.mark.parametrize("target_value", [0, -1, 3])
def test_target_format_constraints(data_linear_combination, optimizer, target_value):
x, x_dot = data_linear_combination
x, y = data_linear_combination

constraint_rhs = target_value * np.ones(2)
constraint_lhs = np.zeros((2, x.shape[1] * x_dot.shape[1]))
constraint_lhs = np.zeros((2, x.shape[1], y.shape[1]))

# Should force corresponding entries of coef_ to be target_value
constraint_lhs[0, 1] = 1
constraint_lhs[1, 4] = 1
constraint_lhs[0, 2, 1] = 1
constraint_lhs[1, 1, 2] = 1
# reshape to "target" order
constraint_lhs = np.reshape(
np.transpose(constraint_lhs, [0, 2, 1]), (constraint_lhs.shape[0], -1)
)

model = optimizer[0](
constraint_lhs=constraint_lhs, constraint_rhs=constraint_rhs, **optimizer[1]
constraint_lhs=constraint_lhs,
constraint_rhs=constraint_rhs,
constraint_order="target",
**optimizer[1],
)
model.fit(x, y)

np.testing.assert_allclose(
np.array([model.coef_[1, 2], model.coef_[2, 1]]), target_value, atol=1e-7
)
model.fit(x, x_dot)
np.testing.assert_allclose(model.coef_[:, 1], target_value, atol=1e-8)


@pytest.mark.parametrize(
@@ -876,10 +891,11 @@ def test_constrained_inequality_constraints(data_lorenz, params):
thresholder="weighted_l2", thresholds=0.5 * np.ones((1, 2)), expected=0.75
),
],
ids=lambda d: d["thresholder"],
)
def test_trapping_cost_function(params):
expected = params.pop("expected")
opt = TrappingSR3(inequality_constraints=True, relax_optim=True, **params)
opt = TrappingSR3(relax_optim=True, **params)
x = np.eye(2)
y = np.ones(2)
xi, cost = opt._create_var_and_part_cost(2, x, y)
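One convention the rewritten constraint tests share is worth spelling out: a
constraint tensor indexed as [row, feature, target] flattens directly to
constraint_order="feature", and is transposed on its last two axes before
flattening for constraint_order="target". A small sketch (shapes as in the
tests, values illustrative):

import numpy as np

n_rows, n_features, n_targets = 2, 3, 3
lhs = np.zeros((n_rows, n_features, n_targets))
lhs[0, 1, 1] = 1  # pins coef_[1, 1], as in test_feature_format_constraints
lhs[1, 2, 2] = 1  # pins coef_[2, 2]

feature_order = lhs.reshape(n_rows, -1)
target_order = np.transpose(lhs, [0, 2, 1]).reshape(n_rows, -1)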
