- If you recall, for classical ML models such as SVM we had a choice of different kernels, and each kernel comes with its own hyper-parameters, e.g. gamma and C.
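As a concrete example, here is a minimal scikit-learn sketch of an RBF-kernel SVM; the dataset and the particular values of gamma and C are illustrative assumptions, not tuned choices:

from sklearn.datasets import load_digits
from sklearn.svm import SVC

# gamma and C are the RBF kernel's hyper-parameters; these values are arbitrary.
X, y = load_digits(return_X_y=True)
clf = SVC(kernel='rbf', gamma=0.001, C=10.0)
clf.fit(X, y)
print(clf.score(X, y))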
- For ML models, sklearn provides GridSearchCV to obtain the best model hyper-parameters.
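For example, a minimal sketch of tuning gamma and C with GridSearchCV (the grid values are assumptions for illustration):

from sklearn.datasets import load_digits
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

X, y = load_digits(return_X_y=True)

# Illustrative grid; every (C, gamma) pair is evaluated with 3-fold cross-validation.
param_grid = {'C': [0.1, 1, 10], 'gamma': [1e-4, 1e-3, 1e-2]}
search = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=3)
search.fit(X, y)
print(search.best_params_, search.best_score_)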
- For DL we have even more degrees of freedom (hyper-parameters), including dropout rate, number of neurons, batch size, optimizer, and type of activation function, so we need to find the best combination of these; a hyperopt-style search space over them is sketched below.
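Such a search space can be written directly with hyperopt's hp module; the ranges below are assumptions for the sketch (hyperas, introduced next, generates an equivalent space from a template):

from hyperopt import hp

# Hypothetical DL search space: one entry per degree of freedom.
space = {
    'dropout':    hp.uniform('dropout', 0.0, 0.5),
    'neurons':    hp.choice('neurons', [256, 512, 1024]),
    'batch_size': hp.choice('batch_size', [64, 128]),
    'optimizer':  hp.choice('optimizer', ['rmsprop', 'adam', 'sgd']),
    'activation': hp.choice('activation', ['relu', 'sigmoid']),
}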
- Hyperas is a very simple convenience wrapper around hyperopt for fast prototyping with Keras models.
- Install it with pip and try the MNIST example below:

pip install hyperas
from __future__ import print_function
import numpy as np
from hyperopt import Trials, STATUS_OK, tpe
from keras.datasets import mnist
from keras.layers.core import Dense, Dropout, Activation
from keras.models import Sequential
from keras.utils import np_utils
from hyperas import optim
from hyperas.distributions import choice, uniform
def data():
    """
    Data providing function:
    This function is separated from create_model() so that hyperopt
    won't reload data for each evaluation run.
    """
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255
    nb_classes = 10
    y_train = np_utils.to_categorical(y_train, nb_classes)
    y_test = np_utils.to_categorical(y_test, nb_classes)
    return x_train, y_train, x_test, y_test
def create_model(x_train, y_train):
    """
    Model providing function:
    Create Keras model with double curly brackets dropped-in as needed.
    Return value has to be a valid python dictionary with two customary keys:
        - loss: Specify a numeric evaluation metric to be minimized
        - status: Just use STATUS_OK and see hyperopt documentation if not feasible
    The last one is optional, though recommended, namely:
        - model: specify the model just created so that we can later use it again.
    """
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense({{choice([256, 512, 1024])}}))
    model.add(Activation({{choice(['relu', 'sigmoid'])}}))
    model.add(Dropout({{uniform(0, 1)}}))

    # If we choose 'four', add an additional fourth layer
    if {{choice(['three', 'four'])}} == 'four':
        model.add(Dense(100))
        # We can also choose between complete sets of layers
        model.add({{choice([Dropout(0.5), Activation('linear')])}})
        model.add(Activation('relu'))

    model.add(Dense(10))
    model.add(Activation('softmax'))

    model.compile(loss='categorical_crossentropy', metrics=['accuracy'],
                  optimizer={{choice(['rmsprop', 'adam', 'sgd'])}})

    result = model.fit(x_train, y_train,
                       batch_size={{choice([64, 128])}},
                       epochs=2,
                       verbose=2,
                       validation_split=0.1)

    # get the highest validation accuracy of the training epochs
    validation_acc = np.amax(result.history['val_acc'])
    print('Best validation acc of epoch:', validation_acc)
    return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
if __name__ == '__main__':
    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=5,
                                          trials=Trials())
    X_train, Y_train, X_test, Y_test = data()
    print("Evaluation of best performing model:")
    print(best_model.evaluate(X_test, Y_test))
    print("Best performing model chosen hyper-parameters:")
    print(best_run)
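After optim.minimize returns, best_run holds the winning hyper-parameter assignment and best_model is the already-trained Keras model; a natural follow-up (a sketch using the standard Keras save API, with an illustrative filename) is to persist it for later reuse:

    # Save the winning model for later reuse; the filename is illustrative.
    best_model.save('best_mnist_model.h5')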
Source: https://www.kaggle.com/kt66nf/hyperparameter-optimization-using-keras-hyperas