Fix differential evolution #1150

Merged · merged 7 commits · Dec 5, 2024

Changes from 5 commits
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,9 @@
## Dependencies
- Allow numpy >= 2.x (#1146)

## Bugfixes
- Fix bug in the differential evolution acquisition maximizer when the search space contains categorical hyperparameters (#1150)

## Examples
- Add warmstarting example (#1120)

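The bug in a nutshell: scipy's differential evolution solver proposes points in the continuous unit hypercube, but for categorical (and ordinal) hyperparameters those raw values are not valid configuration-vector entries, so building a `Configuration` from them directly breaks. The fix routes every candidate through `transform_continuous_designs`, which discretizes those dimensions first. A minimal sketch of the repaired path, not part of the diff; the toy space and the `origin` label are assumptions for illustration:

import numpy as np
from ConfigSpace import Categorical, ConfigurationSpace, Float

from smac.utils.configspace import transform_continuous_designs

cs = ConfigurationSpace(seed=0)
cs.add([Categorical("a", ["c1", "c2", "c3"]), Float("c", (0, 1))])

# One DE candidate in the unit cube. ConfigSpace orders hyperparameters
# alphabetically, so column 0 belongs to the categorical "a".
design = np.array([[0.7, 0.3]])

configs = transform_continuous_designs(design=design, origin="demo", configspace=cs)
print(configs[0])  # "a" maps to choice index int(0.7 * 3) = 2, i.e. "c3"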
78 changes: 68 additions & 10 deletions smac/acquisition/maximizer/differential_evolution.py
@@ -1,10 +1,14 @@
from __future__ import annotations

import numpy as np
-from ConfigSpace import Configuration
+from ConfigSpace import Configuration, ConfigurationSpace
from scipy.optimize._differentialevolution import DifferentialEvolutionSolver

from smac.acquisition.function.abstract_acquisition_function import (
AbstractAcquisitionFunction,
)
from smac.acquisition.maximizer import AbstractAcquisitionMaximizer
from smac.utils.configspace import transform_continuous_designs

__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
@@ -23,8 +27,46 @@ class DifferentialEvolution(AbstractAcquisitionMaximizer):
[1] Storn, R and Price, K, Differential Evolution - a Simple and Efficient Heuristic for Global
Optimization over Continuous Spaces, Journal of Global Optimization, 1997, 11, 341 - 359.

    Parameters
    ----------
    configspace : ConfigurationSpace
    acquisition_function : AbstractAcquisitionFunction
    max_iter : int, defaults to 1000
        Maximum number of iterations that the DE will perform.
    challengers : int, defaults to 50000
        Number of challengers. Together with ``max_iter`` this determines the
        population size of the DE solver (``challengers // max_iter``).
    strategy : str, defaults to "best1bin"
        The strategy to use for the DE.
    polish : bool, defaults to True
        Whether to polish the final solution using L-BFGS-B.
    mutation : tuple[float, float], defaults to (0.5, 1.0)
        The mutation constant.
    recombination : float, defaults to 0.7
        The recombination constant.
    seed : int, defaults to 0
    """

def __init__(
self,
configspace: ConfigurationSpace,
acquisition_function: AbstractAcquisitionFunction | None = None,
max_iter: int = 1000,
challengers: int = 50000,
strategy: str = "best1bin",
polish: bool = True,
mutation: tuple[float, float] = (0.5, 1.0),
recombination: float = 0.7,
seed: int = 0,
):
super().__init__(configspace, acquisition_function, challengers, seed)
self.max_iter = max_iter
self.strategy = strategy
self.polish = polish
self.mutation = mutation
self.recombination = recombination

def _maximize(
self,
previous_configs: list[Configuration],
@@ -36,30 +78,46 @@ def _maximize(

def func(x: np.ndarray) -> np.ndarray:
assert self._acquisition_function is not None
-            return -self._acquisition_function([Configuration(self._configspace, vector=x)])
+            if len(x.shape) == 1:
+                return -self._acquisition_function(
+                    [
+                        transform_continuous_designs(
+                            design=np.expand_dims(x, axis=0),
+                            origin="Differential Evolution",
+                            configspace=self._configspace,
+                        )[0]
+                    ]
+                )
+            return -self._acquisition_function(
+                transform_continuous_designs(
+                    design=x.T, origin="Differential Evolution", configspace=self._configspace
+                )
+            )

ds = DifferentialEvolutionSolver(
func,
bounds=[[0, 1] for _ in range(len(self._configspace))],
args=(),
-            strategy="best1bin",
-            maxiter=1000,
-            popsize=50,
+            strategy=self.strategy,
+            maxiter=self.max_iter,
+            popsize=self._challengers // self.max_iter,
             tol=0.01,
-            mutation=(0.5, 1),
-            recombination=0.7,
+            mutation=self.mutation,
+            recombination=self.recombination,
             seed=self._rng.randint(1000),
-            polish=True,
+            polish=self.polish,
callback=None,
disp=False,
init="latinhypercube",
atol=0,
vectorized=True,
)

_ = ds.solve()
for pop, val in zip(ds.population, ds.population_energies):
-            rc = Configuration(self._configspace, vector=pop)
-            rc.origin = "Acquisition Function Maximizer: Differential Evolution"
+            rc = transform_continuous_designs(
+                design=np.expand_dims(pop, axis=0),
+                origin="Acquisition Function Maximizer: Differential Evolution",
+                configspace=self._configspace,
+            )[0]
configs.append((-val, rc))

configs.sort(key=lambda t: t[0])
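For orientation, a minimal sketch (not part of the diff) of the two pieces above. The population size now derives from the constructor arguments, and with vectorized=True scipy hands `func` the whole population as an array of shape (n_dims, n_candidates), while the polishing step (L-BFGS-B) passes a single 1-D point of shape (n_dims,), which is why `func` branches on len(x.shape) and transposes the 2-D case. The toy space `cs` is an assumption:

import numpy as np
from ConfigSpace import Categorical, ConfigurationSpace, Float

from smac.acquisition.maximizer.differential_evolution import DifferentialEvolution
from smac.utils.configspace import transform_continuous_designs

cs = ConfigurationSpace(seed=0)
cs.add([Categorical("a", ["c1", "c2", "c3"]), Float("c", (0, 1))])

# With the defaults challengers=50000 and max_iter=1000, the derived
# population size is 50000 // 1000 = 50, the value that was previously
# hard-coded as popsize=50.
de = DifferentialEvolution(cs, challengers=50000, max_iter=1000)

# Vectorized call: one column per candidate, as scipy passes it ...
population = np.random.rand(len(cs), 5)  # shape (n_dims, n_candidates)
configs = transform_continuous_designs(
    design=population.T, origin="demo", configspace=cs
)
print(len(configs))  # 5 candidate configurations

# ... versus the single 1-D point used during polishing:
single = np.random.rand(len(cs))  # shape (n_dims,)
config = transform_continuous_designs(
    design=np.expand_dims(single, axis=0), origin="demo", configspace=cs
)[0]
print(config)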
54 changes: 0 additions & 54 deletions smac/initial_design/abstract_initial_design.py
@@ -155,57 +155,3 @@ def select_configurations(self) -> list[Configuration]:
def _select_configurations(self) -> list[Configuration]:
"""Selects the initial configurations, depending on the implementation of the initial design."""
raise NotImplementedError

-    def _transform_continuous_designs(
-        self, design: np.ndarray, origin: str, configspace: ConfigurationSpace
-    ) -> list[Configuration]:
-        """Transforms the continuous designs into a discrete list of configurations.
-
-        Parameters
-        ----------
-        design : np.ndarray
-            Array of hyperparameters originating from the initial design strategy.
-        origin : str | None, defaults to None
-            Label for a configuration where it originated from.
-        configspace : ConfigurationSpace
-
-        Returns
-        -------
-        configs : list[Configuration]
-            Continuous transformed configs.
-        """
-        params = list(configspace.values())
-        for idx, param in enumerate(params):
-            if isinstance(param, IntegerHyperparameter):
-                design[:, idx] = param.to_vector(param.to_value(design[:, idx]))
-            elif isinstance(param, NumericalHyperparameter):
-                continue
-            elif isinstance(param, Constant):
-                design_ = np.zeros(np.array(design.shape) + np.array((0, 1)))
-                design_[:, :idx] = design[:, :idx]
-                design_[:, idx + 1 :] = design[:, idx:]
-                design = design_
-            elif isinstance(param, CategoricalHyperparameter):
-                v_design = design[:, idx]
-                v_design[v_design == 1] = 1 - 10**-10
-                design[:, idx] = np.array(v_design * len(param.choices), dtype=int)
-            elif isinstance(param, OrdinalHyperparameter):
-                v_design = design[:, idx]
-                v_design[v_design == 1] = 1 - 10**-10
-                design[:, idx] = np.array(v_design * len(param.sequence), dtype=int)
-            else:
-                raise ValueError("Hyperparameter not supported when transforming a continuous design.")
-
-        configs = []
-        for vector in design:
-            try:
-                conf = deactivate_inactive_hyperparameters(
-                    configuration=None, configuration_space=configspace, vector=vector
-                )
-            except ForbiddenValueError:
-                continue
-
-            conf.origin = origin
-            configs.append(conf)
-
-        return configs
3 changes: 2 additions & 1 deletion smac/initial_design/latin_hypercube_design.py
@@ -5,6 +5,7 @@
from scipy.stats.qmc import LatinHypercube

from smac.initial_design.abstract_initial_design import AbstractInitialDesign
+from smac.utils.configspace import transform_continuous_designs

__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
@@ -25,6 +26,6 @@ def _select_configurations(self) -> list[Configuration]:

lhd = LatinHypercube(d=len(params) - constants, seed=self._rng.randint(0, 1000000)).random(n=self._n_configs)

-        return self._transform_continuous_designs(
+        return transform_continuous_designs(
design=lhd, origin="Initial Design: Latin Hypercube", configspace=self._configspace
)
4 changes: 3 additions & 1 deletion smac/initial_design/sobol_design.py
@@ -9,6 +9,8 @@
from scipy.stats.qmc import Sobol

from smac.initial_design.abstract_initial_design import AbstractInitialDesign
+from smac.utils.configspace import transform_continuous_designs


__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
@@ -43,6 +45,6 @@ def _select_configurations(self) -> list[Configuration]:
warnings.simplefilter("ignore")
sobol = sobol_gen.random(self._n_configs)

-        return self._transform_continuous_designs(
+        return transform_continuous_designs(
design=sobol, origin="Initial Design: Sobol", configspace=self._configspace
)
63 changes: 62 additions & 1 deletion smac/utils/configspace.py
@@ -11,13 +11,19 @@
BetaIntegerHyperparameter,
CategoricalHyperparameter,
Constant,
IntegerHyperparameter,
NormalFloatHyperparameter,
NormalIntegerHyperparameter,
NumericalHyperparameter,
OrdinalHyperparameter,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
)
-from ConfigSpace.util import get_one_exchange_neighbourhood
+from ConfigSpace.util import (
+    ForbiddenValueError,
+    deactivate_inactive_hyperparameters,
+    get_one_exchange_neighbourhood,
+)

__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
@@ -182,6 +188,61 @@ def print_config_changes(
logger.debug(msg)


def transform_continuous_designs(
design: np.ndarray, origin: str, configspace: ConfigurationSpace
) -> list[Configuration]:
"""Transforms the continuous designs into a discrete list of configurations.
Parameters
----------
design : np.ndarray
Array of hyperparameters originating from the initial design strategy.
origin : str | None, defaults to None
Label for a configuration where it originated from.
configspace : ConfigurationSpace
Returns
-------
configs : list[Configuration]
Continuous transformed configs.
"""
    params = list(configspace.values())
for idx, param in enumerate(params):
if isinstance(param, IntegerHyperparameter):
            design[:, idx] = param.to_vector(param.to_value(design[:, idx]))
elif isinstance(param, NumericalHyperparameter):
continue
elif isinstance(param, Constant):
design_ = np.zeros(np.array(design.shape) + np.array((0, 1)))
design_[:, :idx] = design[:, :idx]
design_[:, idx + 1 :] = design[:, idx:]
design = design_
elif isinstance(param, CategoricalHyperparameter):
v_design = design[:, idx]
v_design[v_design == 1] = 1 - 10**-10
design[:, idx] = np.array(v_design * len(param.choices), dtype=int)
elif isinstance(param, OrdinalHyperparameter):
v_design = design[:, idx]
v_design[v_design == 1] = 1 - 10**-10
design[:, idx] = np.array(v_design * len(param.sequence), dtype=int)
else:
raise ValueError("Hyperparameter not supported when transforming a continuous design.")

configs = []
for vector in design:
try:
conf = deactivate_inactive_hyperparameters(
configuration=None, configuration_space=configspace, vector=vector
)
except ForbiddenValueError:
continue

conf.origin = origin
configs.append(conf)

return configs


# def check_subspace_points(
# X: np.ndarray,
# cont_dims: np.ndarray | list = [],
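The categorical and ordinal branches above rely on a small clamping trick worth spelling out: a unit-interval value is scaled by the number of choices and truncated to an integer index, and values exactly equal to 1 are first nudged down so the index never reaches len(choices). A self-contained sketch of just that arithmetic:

import numpy as np

choices = ["c1", "c2", "c3"]
v_design = np.array([0.0, 0.5, 0.99, 1.0])
v_design[v_design == 1] = 1 - 10**-10  # nudge 1.0 down so the index stays in range
idx = np.array(v_design * len(choices), dtype=int)
print(idx)  # [0 1 2 2]; without the nudge, 1.0 would give the out-of-range index 3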
53 changes: 51 additions & 2 deletions tests/test_acquisition/test_maximizers.py
@@ -200,6 +200,20 @@ def configspace() -> ConfigurationSpace:
return cs


@pytest.fixture
def configspace_categorical() -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)

a = Categorical("a", ["c1", "c2", "c3"])
b = Categorical("b", ["c1", "c2", "c3", "c4"])
c = Float("c", (0, 1), default=0.5)

# Add all hyperparameters at once:
cs.add([a, b, c])

return cs


@pytest.fixture
def model(configspace: ConfigurationSpace):
model = RandomForest(configspace)
@@ -256,6 +270,13 @@ def test_local_search_2(configspace, acquisition_function):
assert values[0][0] >= values[1][0]


def test_local_search_categorical(configspace_categorical, acquisition_function):
start_points = configspace_categorical.sample_configuration(100)
ls = LocalSearch(configspace_categorical, acquisition_function, max_steps=100)

values = ls._maximize(start_points, 1)
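    # Smoke test: like the other *_categorical tests below, this passes as long as
    # maximization over a space with categorical hyperparameters raises no error.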


def test_get_initial_points_moo(configspace):
class Model:
def predict_marginalized(self, X):
@@ -302,6 +323,13 @@ def test_random_search(configspace, acquisition_function):
assert all([v[0] == 0 for v in values])


def test_random_search_categorical(configspace_categorical, acquisition_function):
start_points = configspace_categorical.sample_configuration(100)
rs = RandomSearch(configspace_categorical, acquisition_function)

values = rs._maximize(start_points, 1)


def test_random_search_sorted(configspace, acquisition_function):
start_points = configspace.sample_configuration(100)
rs = RandomSearch(configspace, acquisition_function, challengers=1000)
@@ -317,6 +345,13 @@ def test_random_search_sorted(configspace, acquisition_function):
assert all([v[0] > 0 for v in values])


def test_sorted_random_search_categorical(configspace_categorical, acquisition_function):
start_points = configspace_categorical.sample_configuration(100)
rs = RandomSearch(configspace_categorical, acquisition_function)

values = rs._maximize(start_points, 1, _sorted=True)


# --------------------------------------------------------------
# TestLocalAndRandomSearch
# --------------------------------------------------------------
@@ -343,6 +378,13 @@ def test_local_and_random_search(configspace, acquisition_function):
assert "Acquisition Function Maximizer: Local Search" in config_origins


def test_local_and_random_search_categorical(configspace_categorical, acquisition_function):
start_points = configspace_categorical.sample_configuration(100)
rs = LocalAndSortedRandomSearch(configspace_categorical, acquisition_function, max_steps=100)

values = rs._maximize(start_points, 1)


# --------------------------------------------------------------
# TestLocalAndSortedPriorRandomSearch
# --------------------------------------------------------------
@@ -423,7 +465,14 @@ def __call__(self, arrays):

def test_differential_evolution(configspace, acquisition_function):
start_points = configspace.sample_configuration(100)
-    rs = DifferentialEvolution(configspace, acquisition_function, challengers=1000)
+    de = DifferentialEvolution(configspace, acquisition_function, challengers=1000)

-    values = rs._maximize(start_points, 1)
+    values = de._maximize(start_points, 1)
+    assert values[0][1].origin == "Acquisition Function Maximizer: Differential Evolution"


def test_differential_evolution_categorical(configspace_categorical, acquisition_function):
start_points = configspace_categorical.sample_configuration(100)
de = DifferentialEvolution(configspace_categorical, acquisition_function)

values = de._maximize(start_points, 1)
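    # Not part of the PR: a natural extra check mirroring test_differential_evolution above.
    assert values[0][1].origin == "Acquisition Function Maximizer: Differential Evolution"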