Skip to content

Commit

Permalink
fix KPLSK performance (#544)
Browse files Browse the repository at this point in the history
* fix kplsk

* add test

* fix for mfkplsk

* fix error

* ruff
  • Loading branch information
Paul-Saves authored Apr 10, 2024
1 parent 161a22c commit b6bd190
Show file tree
Hide file tree
Showing 2 changed files with 95 additions and 31 deletions.
68 changes: 37 additions & 31 deletions smt/surrogate_models/krg_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -1824,19 +1824,19 @@ def hessian_minus_reduced_likelihood_function(log10t):
else:
n_iter = 0

for ii in range(n_iter, -1, -1):
(
best_optimal_theta,
best_optimal_rlf_value,
best_optimal_par,
constraints,
) = (
[],
[],
[],
[],
)
(
best_optimal_theta,
best_optimal_rlf_value,
best_optimal_par,
constraints,
) = (
[],
[],
[],
[],
)

for ii in range(n_iter, -1, -1):
bounds_hyp = []

self.theta0 = deepcopy(self.options["theta0"])
Expand All @@ -1848,16 +1848,22 @@ def hessian_minus_reduced_likelihood_function(log10t):
# to theta in (0,2e1]
theta_bounds = self.options["theta_bounds"]
if self.theta0[i] < theta_bounds[0] or self.theta0[i] > theta_bounds[1]:
warnings.warn(
f"theta0 is out the feasible bounds ({self.theta0}[{i}] out of \
[{theta_bounds[0]}, {theta_bounds[1]}]). \
A random initialisation is used instead."
)
self.theta0[i] = self.random_state.rand()
self.theta0[i] = (
self.theta0[i] * (theta_bounds[1] - theta_bounds[0])
+ theta_bounds[0]
)
if ii == 0 and "KPLSK" in self.name:
if self.theta0[i] - theta_bounds[1] > 0:
self.theta0[i] = theta_bounds[1] - 1e-10
else:
self.theta0[i] = theta_bounds[0] + 1e-10
else:
warnings.warn(
f"theta0 is out the feasible bounds ({self.theta0}[{i}] out of \
[{theta_bounds[0]}, {theta_bounds[1]}]). \
A random initialisation is used instead."
)
self.theta0[i] = self.random_state.rand()
self.theta0[i] = (
self.theta0[i] * (theta_bounds[1] - theta_bounds[0])
+ theta_bounds[0]
)

if self.name in ["MGP"]: # to be discussed with R. Priem
constraints.append(lambda theta, i=i: theta[i] + theta_bounds[1])
Expand Down Expand Up @@ -1958,15 +1964,15 @@ def hessian_minus_reduced_likelihood_function(log10t):
np.log10([theta_bounds]), repeats=len(theta0), axis=0
)
theta_all_loops = np.vstack((theta0, theta0_rand))

if self.options["n_start"] > 1:
sampling = LHS(
xlimits=theta_limits,
criterion="maximin",
random_state=self.random_state,
)
theta_lhs_loops = sampling(self.options["n_start"])
theta_all_loops = np.vstack((theta_all_loops, theta_lhs_loops))
if ii == 1 or "KPLSK" not in self.name:
if self.options["n_start"] > 1:
sampling = LHS(
xlimits=theta_limits,
criterion="maximin",
random_state=self.random_state,
)
theta_lhs_loops = sampling(self.options["n_start"])
theta_all_loops = np.vstack((theta_all_loops, theta_lhs_loops))

optimal_theta_res = {"fun": float("inf")}
optimal_theta_res_loop = None
Expand Down
58 changes: 58 additions & 0 deletions smt/surrogate_models/tests/test_kpls.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@

from smt.sampling_methods import LHS
from smt.surrogate_models import KPLS
from smt.surrogate_models import KRG, KPLSK
from smt.utils.misc import compute_rms_error
import time


class TestKPLS(unittest.TestCase):
Expand Down Expand Up @@ -98,6 +101,61 @@ def test_optim_TNC_Cobyla(self):
< 2
)

def test_optim_kplsk(self):
# Griewank function definition
def griewank(x):
x = np.asarray(x)
if x.ndim == 1 or max(x.shape) == 1:
x = x.reshape((1, -1))
# dim = x.shape[1]

s, p = 0.0, 1.0
for i, xi in enumerate(x.T):
s += xi**2 / 4000.0
p *= np.cos(xi / np.sqrt(i + 1))
return s - p + 1.0

lb = -5
ub = 5
n_dim = 200

# LHS training point generation
n_train = 25
sx = LHS(
xlimits=np.repeat(np.atleast_2d([0.0, 1.0]), n_dim, axis=0),
criterion="m",
random_state=42,
)
x_train = sx(n_train)
x_train = lb + (ub - lb) * x_train # map generated samples to design space
y_train = griewank(x_train)
y_train = y_train.reshape((n_train, -1)) # reshape to 2D array

# Random test point generation
n_test = 5000
x_test = np.random.random_sample((n_test, n_dim))
x_test = lb + (ub - lb) * x_test # map generated samples to design space
y_test = griewank(x_test)
y_test = y_test.reshape((n_test, -1)) # reshape to 2D array

# Surrogate model definition
n_pls = 3
models = [KRG(), KPLSK(n_comp=n_pls), KPLS(n_comp=n_pls)]
rms = []
times = []
# Surrogate model fit & error estimation
for model in models:
model.set_training_values(x_train, y_train)
intime = time.time()
model.train()
times.append(time.time() - intime)

# y_pred = model.predict_values(x_test)
error = compute_rms_error(model, x_test, y_test)
rms.append(error)
self.assertTrue((rms[0] <= rms[1]) and (rms[1] <= rms[2]))
self.assertTrue((times[0] >= times[1]) and (times[1] >= times[2]))


# Allow running this test module directly (e.g. `python test_kpls.py`).
if __name__ == "__main__":
    unittest.main()

0 comments on commit b6bd190

Please sign in to comment.