From a5826e03a445d02d01451cdad14f956195694a4c Mon Sep 17 00:00:00 2001 From: Paul-Saves Date: Tue, 19 Mar 2024 16:32:19 +0100 Subject: [PATCH 1/7] change ego test --- smt/applications/tests/test_ego.py | 32 +++++++++++++------ .../tests/test_surrogate_model_examples.py | 6 ++-- smt/utils/design_space.py | 6 ++-- 3 files changed, 28 insertions(+), 16 deletions(-) diff --git a/smt/applications/tests/test_ego.py b/smt/applications/tests/test_ego.py index eb76b2aa5..917ef387a 100644 --- a/smt/applications/tests/test_ego.py +++ b/smt/applications/tests/test_ego.py @@ -1,6 +1,6 @@ # coding: utf-8 """ -Author: Remi Lafage and Nathalie Bartoli +Author: Remi Lafage This package is distributed under New BSD license. """ @@ -985,19 +985,31 @@ def f_obj(X): LHS, design_space, criterion="ese", random_state=random_state ) Xt = sampling(n_doe) - + if ds.HAS_CONFIG_SPACE: # results differs wrt config_space impl + self.assertAlmostEqual(np.sum(Xt), 24.811925491708156, delta=1e-4) + else: + self.assertAlmostEqual(np.sum(Xt), 28.568852027679586, delta=1e-4) + Xt = np.array( + [ + [0.37454012, 1.0], + [0.95071431, 0.0], + [0.73199394, 8.0], + [0.59865848, 6.0], + [0.15601864, 7.0], + ] + ) # To start the Bayesion optimization n_iter = 2 # number of iterations - criterion = "EI" # infill criterion + criterion = "LCB" # infill criterion ego = EGO( n_iter=n_iter, criterion=criterion, xdoe=Xt, surrogate=KRG( design_space=design_space, - categorical_kernel=MixIntKernelType.CONT_RELAX, + categorical_kernel=MixIntKernelType.GOWER, theta0=[1e-2], - n_start=15, + n_start=25, corr="squar_exp", hyper_opt="Cobyla", print_global=False, @@ -1005,15 +1017,15 @@ def f_obj(X): verbose=False, enable_tunneling=False, random_state=random_state, - n_start=15, + n_start=25, ) x_opt, y_opt, dnk, x_data, y_data = ego.optimize(fun=f_obj) if ds.HAS_CONFIG_SPACE: # results differs wrt config_space impl - self.assertAlmostEqual(np.sum(y_data), 5.4385331120184475, delta=1e-3) - self.assertAlmostEqual(np.sum(x_data), 39.711522540205394, delta=1e-3) + self.assertAlmostEqual(np.sum(y_data), 8.846225704750577, delta=1e-4) + self.assertAlmostEqual(np.sum(x_data), 41.811925504901374, delta=1e-4) else: - self.assertAlmostEqual(np.sum(y_data), 1.8911720670620835, delta=1e-6) - self.assertAlmostEqual(np.sum(x_data), 47.56885202767958, delta=1e-6) + self.assertAlmostEqual(np.sum(y_data), 7.8471910288712, delta=1e-4) + self.assertAlmostEqual(np.sum(x_data), 34.81192549, delta=1e-4) def test_ego_gek(self): ego, fun = self.initialize_ego_gek() diff --git a/smt/surrogate_models/tests/test_surrogate_model_examples.py b/smt/surrogate_models/tests/test_surrogate_model_examples.py index 21a5a98de..9d7d8da7c 100644 --- a/smt/surrogate_models/tests/test_surrogate_model_examples.py +++ b/smt/surrogate_models/tests/test_surrogate_model_examples.py @@ -814,9 +814,9 @@ def df_dx(x): genn.options["hidden_layer_sizes"] = [6, 6] genn.options["alpha"] = 0.1 genn.options["lambd"] = 0.1 - genn.options[ - "gamma" - ] = 1.0 # 1 = gradient-enhanced on, 0 = gradient-enhanced off + genn.options["gamma"] = ( + 1.0 # 1 = gradient-enhanced on, 0 = gradient-enhanced off + ) genn.options["num_iterations"] = 1000 genn.options["is_backtracking"] = True genn.options["is_normalize"] = False diff --git a/smt/utils/design_space.py b/smt/utils/design_space.py index c56a172f5..df8406a99 100644 --- a/smt/utils/design_space.py +++ b/smt/utils/design_space.py @@ -523,9 +523,9 @@ def unfold_x( # The is_acting matrix is simply repeated column-wise if is_acting is not None: - 
is_acting_unfolded[
-                :, i_x_unfold : i_x_unfold + n_dim_cat
-            ] = np.tile(is_acting[:, [i]], (1, n_dim_cat))
+            is_acting_unfolded[:, i_x_unfold : i_x_unfold + n_dim_cat] = (
+                np.tile(is_acting[:, [i]], (1, n_dim_cat))
+            )
 
         i_x_unfold += n_dim_cat
 

From cfd167a707f7671410a8f80b5ad538ffd2e819c1 Mon Sep 17 00:00:00 2001
From: Paul-Saves
Date: Tue, 19 Mar 2024 16:34:06 +0100
Subject: [PATCH 2/7] ruff

---
 smt/applications/tests/test_ego.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/smt/applications/tests/test_ego.py b/smt/applications/tests/test_ego.py
index 917ef387a..21e58e77d 100644
--- a/smt/applications/tests/test_ego.py
+++ b/smt/applications/tests/test_ego.py
@@ -1,6 +1,6 @@
 # coding: utf-8
 """
-Author: Remi Lafage
+Author: Remi Lafage
 
 This package is distributed under New BSD license.
 """

From 71c9ee5a50040604de962a5ab2c7c5b7b5e0f3de Mon Sep 17 00:00:00 2001
From: Paul-Saves
Date: Tue, 19 Mar 2024 18:02:26 +0100
Subject: [PATCH 3/7] add a test

---
 smt/surrogate_models/krg_based.py            |  8 ++
 smt/surrogate_models/tests/test_krg_based.py | 98 ++++++++++++++++++++
 2 files changed, 106 insertions(+)

diff --git a/smt/surrogate_models/krg_based.py b/smt/surrogate_models/krg_based.py
index 6eddb4b66..896e44801 100644
--- a/smt/surrogate_models/krg_based.py
+++ b/smt/surrogate_models/krg_based.py
@@ -964,6 +964,11 @@ def _reduced_likelihood_function(self, theta):
                 print("exception : ", e)
                 print(np.linalg.eig(R)[0])
             return reduced_likelihood_function_value, par
+        if linalg.svd(R, compute_uv=False)[-1] < 1.1 * nugget:
+            raise Exception(
+                "R is too ill conditioned. Poor combination "
+                "of regression model and observations."
+            )
 
         # Get generalized least squared solution
         Ft = linalg.solve_triangular(C, self.F, lower=True)
@@ -1466,6 +1471,9 @@ def _predict_values(self, x: np.ndarray, is_acting=None) -> np.ndarray:
         r = self._correlation_types[self.options["corr"]](
             self.optimal_theta, d
         ).reshape(n_eval, self.nt)
+        r = self._correlation_types["squar_exp"](self.optimal_theta, d).reshape(
+            n_eval, self.nt
+        )
         y = np.zeros(n_eval)
         # Compute the regression function
         f = self._regression_types[self.options["poly"]](X_cont)
diff --git a/smt/surrogate_models/tests/test_krg_based.py b/smt/surrogate_models/tests/test_krg_based.py
index a7a7d3142..b99f1af81 100644
--- a/smt/surrogate_models/tests/test_krg_based.py
+++ b/smt/surrogate_models/tests/test_krg_based.py
@@ -8,6 +8,14 @@
 import numpy as np
 
 from smt.surrogate_models.krg_based import KrgBased
+from smt.surrogate_models import KRG
+import openmdao.api as om
+
+
+# defining the toy example
+def target_fun(x):
+    return np.cos(5 * x)
+
 
 class TestKrgBased(unittest.TestCase):
     def test_theta0_default_init(self):
@@ -29,6 +37,96 @@ def test_theta0_erroneous_init(self):
         krg.set_training_values(np.array([[1, 2, 3]]), np.array([[1]]))  # erroneous
         self.assertRaises(ValueError, krg._check_param)
 
+    def test_almost_squar_exp(self):
+        nobs = 50  # number of observations
+        np.random.seed(0)  # a seed for reproducibility
+        xt = np.random.uniform(size=nobs)  # design points
+
+        # adding a random noise to observations
+        yt = target_fun(xt) + np.random.normal(scale=0.05, size=nobs)
+
+        # training the model with the option eval_noise= True
+        sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.9999)
+        sm.set_training_values(xt, yt)
+
+        self.assertRaises(Exception, sm.train)
+
+    def test_less_almost_squar_exp(self):
+        nobs = 50  # number of observations
+        np.random.seed(0)  # a seed for reproducibility
+        xt = np.random.uniform(size=nobs)  # design points
+
+        # adding a random noise to observations
+        yt = target_fun(xt) + np.random.normal(scale=0.05, size=nobs)
+
+        # training the model with the option eval_noise= True
+        sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.999)
+        sm.set_training_values(xt, yt)
+        sm.train()
+
+        # predictions
+        x = np.linspace(0, 1, 500).reshape(-1, 1)
+        sm.predict_values(x)  # predictive mean
+        sm.predict_variances(x)  # predictive variance
+        sm.predict_derivatives(x, 0)  # predictive derivative
+
+        class test_krg_derivs(om.ExplicitComponent):
+            def setup(self):
+                # Inputs
+                self.add_input("x", 0.0)
+                # Outputs
+                self.add_output("y", 0.0)
+
+                # Other dependencies
+                self.declare_partials(of="y", wrt=["x"])
+
+            def compute(self, inputs, outputs):
+                """Evaluate the Kriging surrogate at the component input
+                so that OpenMDAO can compare its analytic derivative
+                against a finite-difference estimate."""
+
+                x = inputs["x"]
+                outputs["y"] = sm.predict_values(x)
+
+            def compute_partials(self, inputs, partials):
+                x = inputs["x"]
+                partials["y", "x"] = sm.predict_derivatives(x, 0)
+
+        p = om.Problem(model=om.Group())
+        ivc = p.model.add_subsystem("vars", om.IndepVarComp())
+        ivc.add_output("x", shape=1)
+        p.model.add_subsystem("test_krg_derivs", test_krg_derivs())
+        p.model.connect("vars.x", "test_krg_derivs.x")
+        p.setup(force_alloc_complex=True)
+        p.set_val("vars.x", val=1.0)
+        p.run_model()
+        cpd = p.check_partials(method="fd", compact_print=True, step=1.0e-7)
+        derivs = list((list((list(cpd.values())[0]).values())[0]).values())
+        self.assertTrue(abs(derivs[0] - derivs[1]) > 1000)
+
+        # training the model with the option eval_noise= True
+        sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.99)
+        sm.set_training_values(xt, yt)
+        sm.train()
+
+        # predictions
+        x = np.linspace(0, 1, 500).reshape(-1, 1)
+        sm.predict_values(x)  # predictive mean
+        sm.predict_variances(x)  # predictive variance
+        sm.predict_derivatives(x, 0)  # predictive derivative
+
+        p = om.Problem(model=om.Group())
+        ivc = p.model.add_subsystem("vars", om.IndepVarComp())
+        ivc.add_output("x", shape=1)
+        p.model.add_subsystem("test_krg_derivs", test_krg_derivs())
+        p.model.connect("vars.x", "test_krg_derivs.x")
+        p.setup(force_alloc_complex=True)
+        p.set_val("vars.x", val=1.0)
+        p.run_model()
+        cpd = p.check_partials(method="fd", compact_print=True, step=1.0e-7)
+        derivs = list((list((list(cpd.values())[0]).values())[0]).values())
+        self.assertTrue(abs(derivs[0] - derivs[1]) < 5e-2)
+
 
 if __name__ == "__main__":
     unittest.main()

From 233e9eb356d090a8731e3b14b84fc7eaa68e05fe Mon Sep 17 00:00:00 2001
From: Paul-Saves
Date: Tue, 19 Mar 2024 18:38:13 +0100
Subject: [PATCH 4/7] add warning

---
 smt/surrogate_models/krg_based.py            | 2 +-
 smt/surrogate_models/tests/test_krg_based.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/smt/surrogate_models/krg_based.py b/smt/surrogate_models/krg_based.py
index 896e44801..7d3bd05da 100644
--- a/smt/surrogate_models/krg_based.py
+++ b/smt/surrogate_models/krg_based.py
@@ -965,7 +965,7 @@ def _reduced_likelihood_function(self, theta):
             print(np.linalg.eig(R)[0])
             return reduced_likelihood_function_value, par
         if linalg.svd(R, compute_uv=False)[-1] < 1.1 * nugget:
-            raise Exception(
+            warnings.warn(
                 "R is too ill conditioned. Poor combination "
                 "of regression model and observations."
) diff --git a/smt/surrogate_models/tests/test_krg_based.py b/smt/surrogate_models/tests/test_krg_based.py index b99f1af81..520a650c2 100644 --- a/smt/surrogate_models/tests/test_krg_based.py +++ b/smt/surrogate_models/tests/test_krg_based.py @@ -49,7 +49,7 @@ def test_almost_squar_exp(self): sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.9999) sm.set_training_values(xt, yt) - self.assertRaises(Exception, sm.train) + self.assertWarns(UserWarning, sm.train) def test_less_almost_squar_exp(self): nobs = 50 # number of obsertvations From 0a33d1aa232c4cb191e93eb0fd3b222e190a0b69 Mon Sep 17 00:00:00 2001 From: Paul-Saves Date: Tue, 19 Mar 2024 18:57:46 +0100 Subject: [PATCH 5/7] fix a bug --- smt/surrogate_models/krg_based.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/smt/surrogate_models/krg_based.py b/smt/surrogate_models/krg_based.py index 7d3bd05da..f06443827 100644 --- a/smt/surrogate_models/krg_based.py +++ b/smt/surrogate_models/krg_based.py @@ -1471,9 +1471,6 @@ def _predict_values(self, x: np.ndarray, is_acting=None) -> np.ndarray: r = self._correlation_types[self.options["corr"]]( self.optimal_theta, d ).reshape(n_eval, self.nt) - r = self._correlation_types["squar_exp"](self.optimal_theta, d).reshape( - n_eval, self.nt - ) y = np.zeros(n_eval) # Compute the regression function f = self._regression_types[self.options["poly"]](X_cont) From b02ec07ca17d92d9faa55448856320671b014651 Mon Sep 17 00:00:00 2001 From: Paul-Saves Date: Tue, 19 Mar 2024 19:02:22 +0100 Subject: [PATCH 6/7] ruff format --- smt/applications/mfk.py | 11 ++++----- smt/applications/tests/test_mfk_variance.py | 1 - smt/applications/tests/test_mfkpls.py | 1 - smt/applications/tests/test_mfkplsk.py | 1 + smt/utils/neural_net/model.py | 6 ++--- smt/utils/options_dictionary.py | 25 ++++++++++++--------- 6 files changed, 22 insertions(+), 23 deletions(-) diff --git a/smt/applications/mfk.py b/smt/applications/mfk.py index 7da2ce83b..7ddbdfd79 100644 --- a/smt/applications/mfk.py +++ b/smt/applications/mfk.py @@ -764,9 +764,7 @@ def predict_variances_all_levels(self, X, is_acting=None): sigma2 = self.optimal_par[0]["sigma2"] / self.y_std**2 MSE[:, 0] = sigma2 * ( # 1 + self.optimal_noise_all[0] - (r_t ** 2).sum(axis=0) + (u_ ** 2).sum(axis=0) - 1 - - (r_t**2).sum(axis=0) - + (u_**2).sum(axis=0) + 1 - (r_t**2).sum(axis=0) + (u_**2).sum(axis=0) ) # Calculate recursively kriging variance at level i @@ -850,7 +848,8 @@ def predict_variances_all_levels(self, X, is_acting=None): Q_ = (np.dot((yt - np.dot(Ft, beta)).T, yt - np.dot(Ft, beta)))[0, 0] MSE[:, i] = ( # sigma2_rho * MSE[:, i - 1] - +Q_ / (2 * (self.nt_all[i] - p - q)) + +Q_ + / (2 * (self.nt_all[i] - p - q)) # * (1 + self.optimal_noise_all[i] - (r_t ** 2).sum(axis=0)) * (1 - (r_t**2).sum(axis=0)) + sigma2 * (u_**2).sum(axis=0) @@ -858,9 +857,7 @@ def predict_variances_all_levels(self, X, is_acting=None): else: MSE[:, i] = sigma2 * ( # 1 + self.optimal_noise_all[i] - (r_t ** 2).sum(axis=0) + (u_ ** 2).sum(axis=0) - 1 - - (r_t**2).sum(axis=0) - + (u_**2).sum(axis=0) + 1 - (r_t**2).sum(axis=0) + (u_**2).sum(axis=0) ) # + sigma2_rho * MSE[:, i - 1] if self.options["propagate_uncertainty"]: MSE[:, i] = MSE[:, i] + sigma2_rho * MSE[:, i - 1] diff --git a/smt/applications/tests/test_mfk_variance.py b/smt/applications/tests/test_mfk_variance.py index a094b6b96..f2fcf3416 100644 --- a/smt/applications/tests/test_mfk_variance.py +++ b/smt/applications/tests/test_mfk_variance.py @@ -12,7 +12,6 @@ https://doi.org/10.1080/00401706.2014.928233 """ - import numpy as 
np from smt.applications.mfk import MFK, NestedLHS from smt.sampling_methods import LHS diff --git a/smt/applications/tests/test_mfkpls.py b/smt/applications/tests/test_mfkpls.py index b6623fe98..69938ddf1 100644 --- a/smt/applications/tests/test_mfkpls.py +++ b/smt/applications/tests/test_mfkpls.py @@ -7,7 +7,6 @@ Adapted to new SMT version in march 2020 by Nathalie Bartoli """ - try: import matplotlib diff --git a/smt/applications/tests/test_mfkplsk.py b/smt/applications/tests/test_mfkplsk.py index cf9719a94..480ac6ccd 100644 --- a/smt/applications/tests/test_mfkplsk.py +++ b/smt/applications/tests/test_mfkplsk.py @@ -6,6 +6,7 @@ Adapted to new SMT version in march 2020 by Nathalie Bartoli """ + try: import matplotlib diff --git a/smt/utils/neural_net/model.py b/smt/utils/neural_net/model.py index de7094545..c4f288c03 100644 --- a/smt/utils/neural_net/model.py +++ b/smt/utils/neural_net/model.py @@ -223,9 +223,9 @@ def train( # Compute average cost and print output avg_cost = np.mean(optimizer.cost_history).squeeze() - self._training_history["epoch_" + str(e)][ - "batch_" + str(b) - ] = optimizer.cost_history + self._training_history["epoch_" + str(e)]["batch_" + str(b)] = ( + optimizer.cost_history + ) if not silent: print( diff --git a/smt/utils/options_dictionary.py b/smt/utils/options_dictionary.py index c1c148252..cd7ab780e 100644 --- a/smt/utils/options_dictionary.py +++ b/smt/utils/options_dictionary.py @@ -82,14 +82,17 @@ def _assert_valid(self, name, value): types = self._declared_entries[name]["types"] if values is not None and types is not None: - assert value in values or isinstance( - value, types - ), "Option %s: value and type of %s are both invalid - " % ( - name, - value, - ) + "value must be %s or type must be %s" % ( - values, - types, + assert value in values or isinstance(value, types), ( + "Option %s: value and type of %s are both invalid - " + % ( + name, + value, + ) + + "value must be %s or type must be %s" + % ( + values, + types, + ) ) elif values is not None: assert value in values, "Option %s: value %s is invalid - must be %s" % ( @@ -98,9 +101,9 @@ def _assert_valid(self, name, value): values, ) elif types is not None: - assert isinstance( - value, types - ), "Option %s: type of %s is invalid - must be %s" % (name, value, types) + assert isinstance(value, types), ( + "Option %s: type of %s is invalid - must be %s" % (name, value, types) + ) def update(self, dict_): """ From 6605ebb03e10703e8156b280420957503366897c Mon Sep 17 00:00:00 2001 From: Paul-Saves Date: Tue, 19 Mar 2024 19:30:16 +0100 Subject: [PATCH 7/7] remove openmdao --- smt/surrogate_models/tests/test_krg_based.py | 65 +++----------------- 1 file changed, 7 insertions(+), 58 deletions(-) diff --git a/smt/surrogate_models/tests/test_krg_based.py b/smt/surrogate_models/tests/test_krg_based.py index 520a650c2..eaad6b828 100644 --- a/smt/surrogate_models/tests/test_krg_based.py +++ b/smt/surrogate_models/tests/test_krg_based.py @@ -9,7 +9,6 @@ from smt.surrogate_models.krg_based import KrgBased from smt.surrogate_models import KRG -import openmdao.api as om # defining the toy example @@ -59,51 +58,6 @@ def test_less_almost_squar_exp(self): # adding a random noise to observations yt = target_fun(xt) + np.random.normal(scale=0.05, size=nobs) - # training the model with the option eval_noise= True - sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.999) - sm.set_training_values(xt, yt) - sm.train() - - # predictions - x = np.linspace(0, 1, 500).reshape(-1, 1) - sm.predict_values(x) # 
predictive mean
-        sm.predict_variances(x)  # predictive variance
-        sm.predict_derivatives(x, 0)  # predictive derivative
-
-        class test_krg_derivs(om.ExplicitComponent):
-            def setup(self):
-                # Inputs
-                self.add_input("x", 0.0)
-                # Outputs
-                self.add_output("y", 0.0)
-
-                # Other dependencies
-                self.declare_partials(of="y", wrt=["x"])
-
-            def compute(self, inputs, outputs):
-                """Evaluate the Kriging surrogate at the component input
-                so that OpenMDAO can compare its analytic derivative
-                against a finite-difference estimate."""
-
-                x = inputs["x"]
-                outputs["y"] = sm.predict_values(x)
-
-            def compute_partials(self, inputs, partials):
-                x = inputs["x"]
-                partials["y", "x"] = sm.predict_derivatives(x, 0)
-
-        p = om.Problem(model=om.Group())
-        ivc = p.model.add_subsystem("vars", om.IndepVarComp())
-        ivc.add_output("x", shape=1)
-        p.model.add_subsystem("test_krg_derivs", test_krg_derivs())
-        p.model.connect("vars.x", "test_krg_derivs.x")
-        p.setup(force_alloc_complex=True)
-        p.set_val("vars.x", val=1.0)
-        p.run_model()
-        cpd = p.check_partials(method="fd", compact_print=True, step=1.0e-7)
-        derivs = list((list((list(cpd.values())[0]).values())[0]).values())
-        self.assertTrue(abs(derivs[0] - derivs[1]) > 1000)
-
         # training the model with the option eval_noise= True
         sm = KRG(eval_noise=False, corr="pow_exp", pow_exp_power=1.99)
         sm.set_training_values(xt, yt)
         sm.train()
@@ -114,18 +68,13 @@ def compute_partials(self, inputs, partials):
         sm.predict_values(x)  # predictive mean
         sm.predict_variances(x)  # predictive variance
         sm.predict_derivatives(x, 0)  # predictive derivative
-
-        p = om.Problem(model=om.Group())
-        ivc = p.model.add_subsystem("vars", om.IndepVarComp())
-        ivc.add_output("x", shape=1)
-        p.model.add_subsystem("test_krg_derivs", test_krg_derivs())
-        p.model.connect("vars.x", "test_krg_derivs.x")
-        p.setup(force_alloc_complex=True)
-        p.set_val("vars.x", val=1.0)
-        p.run_model()
-        cpd = p.check_partials(method="fd", compact_print=True, step=1.0e-7)
-        derivs = list((list((list(cpd.values())[0]).values())[0]).values())
-        self.assertTrue(abs(derivs[0] - derivs[1]) < 5e-2)
+        self.assertLess(
+            np.abs(
+                sm.predict_derivatives(x[20], 0)
+                - (sm.predict_values(x[20] + 1e-6) - sm.predict_values(x[20])) / 1e-6
+            ),
+            1e-2,
+        )
 
 
 if __name__ == "__main__":