Commit: fix gradient algorithm

Paul-Saves committed Jan 29, 2024
1 parent 9fd747a commit 32b3f3e
Showing 5 changed files with 30 additions and 13 deletions.
24 changes: 19 additions & 5 deletions smt/applications/tests/test_vfm.py
@@ -68,7 +68,12 @@ def test_vfm(self):
         Bridge_candidate = "KRG"
         type_bridge = "Multiplicative"
         optionsLF = {}
-        optionsB = {"theta0": [1e-2] * ndim, "print_prediction": False, "deriv": False}
+        optionsB = {
+            "theta0": [1e-2] * ndim,
+            "print_prediction": False,
+            "deriv": False,
+            "hyper_opt": "Cobyla",
+        }
 
         # Construct low/high fidelity data and validation points
         sampling = LHS(xlimits=funLF.xlimits, criterion="m", random_state=42)
@@ -138,7 +143,12 @@ def run_vfm_example(self):
         Bridge_candidate = "KRG"
         type_bridge = "Multiplicative"
         optionsLF = {}
-        optionsB = {"theta0": [1e-2] * ndim, "print_prediction": False, "deriv": False}
+        optionsB = {
+            "theta0": [1e-2] * ndim,
+            "print_prediction": False,
+            "deriv": False,
+            "hyper_opt": "Cobyla",
+        }
 
         # Construct low/high fidelity data and validation points
         sampling = LHS(xlimits=funLF.xlimits, criterion="m")
@@ -200,7 +210,9 @@ def test_KRG_KRG_additive(self):
         yp = M.predict_values(np.atleast_2d(xt[0]))
         dyp = M.predict_derivatives(np.atleast_2d(xt[0]), kx=0)
         self.assert_error(yp, np.array([[0.015368, 0.367424]]), atol=2e-2, rtol=3e-2)
-        self.assert_error(dyp, np.array([[0.07007729, 3.619421]]), atol=3e-1, rtol=1e-2)
+        self.assert_error(
+            dyp, np.array([[-3.11718627e-03, 3.19506239e00]]), atol=3e-1, rtol=1e-2
+        )
 
     def test_QP_KRG_additive(self):
         with Silence():
@@ -214,7 +226,7 @@ def test_QP_KRG_additive(self):
 
         self.assert_error(yp, np.array([[0.015368, 0.367424]]), atol=1e-2, rtol=1e-2)
         self.assert_error(
-            dyp, np.array([[1.16130832e-03, 4.36712162e00]]), atol=3e-1, rtol=1e-2
+            dyp, np.array([[0.02596425, 4.70243162]]), atol=3e-1, rtol=1e-2
         )
 
     def test_KRG_KRG_mult(self):
@@ -228,7 +240,9 @@ def test_KRG_KRG_mult(self):
         dyp = M.predict_derivatives(np.atleast_2d(xt[0]), kx=0)
 
         self.assert_error(yp, np.array([[0.015368, 0.367424]]), atol=2e-2, rtol=3e-2)
-        self.assert_error(dyp, np.array([[0.07007729, 3.619421]]), atol=3e-1, rtol=1e-2)
+        self.assert_error(
+            dyp, np.array([[-3.11718627e-03, 3.19506239e00]]), atol=3e-1, rtol=1e-2
+        )
 
     def test_QP_KRG_mult(self):
         with Silence():
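
The "hyper_opt": "Cobyla" entry added to optionsB selects the gradient-free COBYLA algorithm for tuning the hyper-parameters of the KRG bridge model. A minimal, self-contained sketch of the same option on a standalone KRG model (the 1-D data and values below are purely illustrative, not taken from the test):

import numpy as np
from smt.surrogate_models import KRG

# Illustrative 1-D training data (not from the test suite).
xt = np.array([[0.0], [1.0], [2.0], [3.0], [4.0]])
yt = np.array([0.0, 1.0, 1.5, 0.9, 1.0])

# hyper_opt="Cobyla" asks for the gradient-free COBYLA optimizer during the
# likelihood maximization, mirroring the entry added to optionsB above.
sm = KRG(theta0=[1e-2], print_prediction=False, hyper_opt="Cobyla")
sm.set_training_values(xt, yt)
sm.train()

yp = sm.predict_values(np.array([[2.5]]))
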
2 changes: 1 addition & 1 deletion smt/surrogate_models/krg_based.py
@@ -1016,7 +1016,7 @@ def _reduced_likelihood_gradient(self, theta):
         gamma = par["gamma"]
         Q = par["Q"]
         G = par["G"]
-        sigma_2 = par["sigma2"]
+        sigma_2 = par["sigma2"] + self.options["nugget"]
 
         nb_theta = len(theta)
         grad_red = np.zeros(nb_theta)
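
This one-line change is the core of the fix: the reduced (concentrated) log-likelihood contains a term in log(sigma^2), so each component of its gradient carries a d(sigma^2)/d(theta) / sigma^2 factor that becomes ill-conditioned when the estimated process variance is numerically close to zero; adding the "nugget" option to sigma_2 keeps that denominator away from zero. A rough illustration of the idea (an assumption-level sketch, not the library's actual gradient code; the nugget value is arbitrary):

import numpy as np

def sigma_term_gradient(n_samples, sigma2, dsigma2_dtheta, nugget=1e-10):
    # Derivative of the -n * log(sigma^2) part of the reduced likelihood:
    # regularizing sigma^2 with the nugget keeps the term finite even when
    # the estimated process variance is (numerically) zero.
    return -n_samples * dsigma2_dtheta / (sigma2 + nugget)
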
8 changes: 7 additions & 1 deletion smt/surrogate_models/tests/test_krg_het_noise.py
@@ -23,7 +23,13 @@ def test_predict_output(self):
         xt_full = np.array(3 * xt.tolist())
         yt_full = np.concatenate((yt, yt + 0.2 * yt_std_rand, yt - 0.2 * yt_std_rand))
 
-        sm = KRG(theta0=[1.0], eval_noise=True, use_het_noise=True, n_start=1)
+        sm = KRG(
+            theta0=[1.0],
+            eval_noise=True,
+            use_het_noise=True,
+            n_start=1,
+            hyper_opt="Cobyla",
+        )
         sm.set_training_values(xt_full, yt_full)
         sm.train()
 
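
For context on the test being updated: with eval_noise=True and use_het_noise=True, KRG estimates a separate noise variance for each training point, and the test provides the needed information by replicating every input three times with perturbed outputs. A rough sketch of how replicated points can yield per-point noise estimates (an assumed mechanism for illustration only, not SMT's internal code):

import numpy as np

def estimate_het_noise(x_full, y_full):
    # Group outputs by unique input; the mean becomes the training value and
    # the variance across replicates serves as the per-point noise estimate.
    x_unique, inverse = np.unique(x_full, axis=0, return_inverse=True)
    y_mean = np.array([y_full[inverse == i].mean() for i in range(len(x_unique))])
    y_var = np.array([y_full[inverse == i].var() for i in range(len(x_unique))])
    return x_unique, y_mean, y_var
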
2 changes: 1 addition & 1 deletion smt/tests/test_array_outputs.py
@@ -38,7 +38,7 @@ def test_KRG(self):
         d0 = interp.predict_derivatives(np.atleast_2d(xt[10, :]), 0)
 
         self.assert_error(
-            d0, np.array([[0.06874097, 4.366292277996716]]), atol=0.55, rtol=0.15
+            d0, np.array([[0.24897752, 3.72290526]]), atol=0.55, rtol=0.15
         )
 
     def test_RBF(self):
7 changes: 2 additions & 5 deletions smt/tests/test_kpls_auto.py
@@ -46,12 +46,9 @@ def setUp(self):
         n_comp_opt["Branin"] = 2
         n_comp_opt["Rosenbrock"] = 1
         n_comp_opt["sphere"] = 1
-        if platform.startswith("linux"):  # result depends on platform
-            n_comp_opt["exp"] = 2
-        else:
-            n_comp_opt["exp"] = 3
+        n_comp_opt["exp"] = 2
         n_comp_opt["tanh"] = 1
-        n_comp_opt["cos"] = 1
+        n_comp_opt["cos"] = 2
 
         self.nt = nt
         self.ne = ne
