如何将KerasClassifier,Hyperopt和Sklearn交叉验证放在一起 [英] How to put KerasClassifier, Hyperopt and Sklearn cross-validation together
问题描述
我正在使用Keras模型上的sklearn执行超参数调整优化(hyperopt)任务.我正在尝试使用Sklearn交叉验证来优化KerasClassifiers,以下代码如下:
def create_model():
    """Build and compile a binary-classification Keras model.

    NOTE(review): this (the asker's original code) reads its
    hyperparameters from a global ``params`` dict and the feature
    matrix from a global ``features_`` — exactly the pattern that
    makes ``KerasClassifier(**params)`` fail, as discussed below.
    """
    model = Sequential()
    model.add(
        Dense(output_dim=params['units1'],
              input_dim=features_.shape[1],
              kernel_initializer="glorot_uniform"))
    model.add(Activation(params['activation']))
    model.add(Dropout(params['dropout1']))
    model.add(BatchNormalization())
    ...  # further layers elided in the original snippet
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model
现在我要做的是使用以下方式将Hyperopt参数传递给KerasClassifier
def objective(params, n_folds=N_FOLDS):
    """Objective function for hyperparameter optimization.

    Parameters
    ----------
    params : dict
        Hyperparameters sampled by hyperopt, forwarded to
        ``KerasClassifier``.
    n_folds : int
        Number of cross-validation folds.  The original accepted this
        argument but hard-coded ``cv=5``; it is now honoured (the
        default ``N_FOLDS`` keeps existing callers unchanged).

    Returns
    -------
    dict
        hyperopt trial record: ``loss`` (negative mean CV accuracy,
        since fmin minimises), the params, the global iteration
        counter, the wall time, and ``STATUS_OK``.
    """
    # Keep track of evals
    global ITERATION
    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model, **params)

    start = timer()
    # Perform n_folds cross validation (was hard-coded to cv=5).
    cv_results = cross_val_score(clf,
                                 features_,
                                 labels,
                                 cv=n_folds
                                 ).mean()
    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK,
    }
我将搜索空间定义为:
# Hyperopt search space for the asker's model: layer widths, dropout
# rates, batch size, epoch count, an optimizer sub-space, and a fixed
# activation.
space = {
    'units1': hp.choice('units1', [64, 128, 256, 512]),
    'units2': hp.choice('units2', [64, 128, 256, 512]),
    'dropout1': hp.choice('dropout1', [0.25, 0.5, 0.75]),
    'dropout2': hp.choice('dropout2', [0.25, 0.5, 0.75]),
    'batch_size': hp.choice('batch_size', [10, 20, 40, 60, 80, 100]),
    'nb_epochs': hp.choice('nb_epochs', [10, 50, 100]),
    'optimizer': opt_search_space,
    'activation': 'relu',
}
运行优化
# Run the TPE optimisation over the search space with a fixed seed.
best = fmin(
    fn=objective,
    space=space,
    algo=tpe.suggest,
    max_evals=MAX_EVALS,
    trials=bayes_trials,
    rstate=np.random.RandomState(50),
)
但是它运行失败,并给出以下错误:
ValueError: activation is not a legal parameter(activation 不是合法参数)
什么是正确的方法?
将超参数作为 create_model
函数的输入参数,然后即可传入 params
字典。另外,请把搜索空间中的键 nb_epochs
改为 epochs
。其他有效参数请参阅相关文档。
尝试以下简化示例.
import numpy as np
import pandas as pd
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from tensorflow.keras import Sequential
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dense, Dropout
import time
def timer():
    """Return a monotonically increasing timestamp in seconds.

    The original implementation returned ``time.localtime(...)[5]`` —
    the *seconds* field of the wall-clock time — which wraps every
    minute, so elapsed-time differences (``timer() - start``) could be
    negative or nonsensical.  ``time.perf_counter()`` is monotonic with
    sub-second resolution, and callers that subtract two readings keep
    working unchanged.
    """
    return time.perf_counter()
# Synthetic, imbalanced (70/30) binary-classification data set with a
# fixed seed so runs are reproducible.
X, y = make_classification(
    n_samples=1000,
    n_classes=2,
    n_informative=4,
    weights=[0.7, 0.3],
    random_state=0,
)
定义keras
模型:
def create_model(units1, activation, dropout):
    """Build and compile a single-hidden-layer binary classifier.

    Every hyperparameter arrives as an explicit argument, so
    ``KerasClassifier`` can forward values from the hyperopt search
    space directly — this is the key difference from the question's
    version, which read globals.
    """
    net = Sequential()
    net.add(Dense(units1,
                  input_dim=X.shape[1],
                  kernel_initializer="glorot_uniform",
                  activation=activation))
    net.add(Dropout(dropout))
    net.add(Dense(1, activation='sigmoid'))
    net.compile(loss='binary_crossentropy',
                optimizer='adam',
                metrics=['accuracy'])
    return net
def objective(params, n_folds=2):
    """Objective function for hyperparameter optimization.

    Parameters
    ----------
    params : dict
        Sampled hyperparameters.  Keys matching ``create_model``'s
        signature configure the network; the rest (``batch_size``,
        ``epochs``) are consumed by ``KerasClassifier``/``fit``.
    n_folds : int
        Number of cross-validation folds.  The original accepted this
        argument but hard-coded ``cv=5``; it is now honoured.

    Returns
    -------
    dict
        hyperopt trial record: ``loss`` (negative mean CV accuracy,
        since fmin minimises), params, iteration counter, wall time,
        and ``STATUS_OK``.
    """
    # Keep track of evals
    global ITERATION
    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model, **params)

    start = timer()
    # Perform n_folds cross validation (was hard-coded to cv=5,
    # silently ignoring the n_folds argument).
    cv_results = cross_val_score(clf, X, y,
                                 cv=n_folds,
                                 ).mean()
    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK,
    }
from hyperopt import fmin, tpe, hp, Trials, STATUS_OK
# Simplified search space.  NOTE(review): the hp.choice labels
# 'dropout1' and 'nb_epochs' differ from the dict keys 'dropout' and
# 'epochs'; fmin reports its best values under the *labels*, which is
# confusing but harmless — confirm before renaming.
space = {
    'units1': hp.choice('units1', [12, 64]),
    'dropout': hp.choice('dropout1', [0.25, 0.5]),
    'batch_size': hp.choice('batch_size', [10, 20]),
    'epochs': hp.choice('nb_epochs', [2, 3]),
    'activation': 'relu',
}
# ITERATION is the trial counter mutated inside objective().
# (`global` at module level is a no-op but kept for fidelity.)
global ITERATION
ITERATION = 0

bayes_trials = Trials()

# Run 5 TPE trials with a fixed seed.
best = fmin(
    fn=objective,
    space=space,
    algo=tpe.suggest,
    max_evals=5,
    trials=bayes_trials,
    rstate=np.random.RandomState(50),
)
I am performing a hyperparameter tuning optimization (hyperopt) task with sklearn on Keras models. I am trying to optimize KerasClassifiers using Sklearn cross-validation; some code follows:
def create_model():
    """Build and compile a binary-classification Keras model.

    NOTE(review): this (the asker's original code) pulls its
    hyperparameters from a global ``params`` dict and the feature
    matrix from a global ``features_`` — the pattern that makes
    ``KerasClassifier(**params)`` fail, as discussed below.
    """
    model = Sequential()
    model.add(
        Dense(output_dim=params['units1'],
              input_dim=features_.shape[1],
              kernel_initializer="glorot_uniform"))
    model.add(Activation(params['activation']))
    model.add(Dropout(params['dropout1']))
    model.add(BatchNormalization())
    ...  # further layers elided in the original snippet
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model
Now what I want to do is to pass the Hyperopt params to KerasClassifier using the following way
def objective(params, n_folds=N_FOLDS):
    """Objective function for hyperparameter optimization.

    Parameters
    ----------
    params : dict
        Hyperparameters sampled by hyperopt, forwarded to
        ``KerasClassifier``.
    n_folds : int
        Number of cross-validation folds.  The original accepted this
        argument but hard-coded ``cv=5``; it is now honoured (the
        default ``N_FOLDS`` keeps existing callers unchanged).

    Returns
    -------
    dict
        hyperopt trial record: ``loss`` (negative mean CV accuracy,
        since fmin minimises), params, iteration counter, wall time,
        and ``STATUS_OK``.
    """
    # Keep track of evals
    global ITERATION
    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model, **params)

    start = timer()
    # Perform n_folds cross validation (was hard-coded to cv=5).
    cv_results = cross_val_score(clf,
                                 features_,
                                 labels,
                                 cv=n_folds
                                 ).mean()
    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK,
    }
I define the search space as:
# Hyperopt search space: layer widths, dropout rates, batch size,
# epoch count, an optimizer sub-space, and a fixed activation.
space = {
    'units1': hp.choice('units1', [64, 128, 256, 512]),
    'units2': hp.choice('units2', [64, 128, 256, 512]),
    'dropout1': hp.choice('dropout1', [0.25, 0.5, 0.75]),
    'dropout2': hp.choice('dropout2', [0.25, 0.5, 0.75]),
    'batch_size': hp.choice('batch_size', [10, 20, 40, 60, 80, 100]),
    'nb_epochs': hp.choice('nb_epochs', [10, 50, 100]),
    'optimizer': opt_search_space,
    'activation': 'relu',
}
Run optimization
# Run the TPE optimisation over the search space with a fixed seed.
best = fmin(
    fn=objective,
    space=space,
    algo=tpe.suggest,
    max_evals=MAX_EVALS,
    trials=bayes_trials,
    rstate=np.random.RandomState(50),
)
But it fails giving this error:
ValueError: activation is not a legal parameter
What's the right way to do it?
Make the hyperparameters the input parameters of the create_model
function. Then you can feed in the params
dict. Also change the key nb_epochs
to epochs
in the search space. Read more about the other valid parameters here.
Try the following simplified example of yours.
import numpy as np
import pandas as pd
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from tensorflow.keras import Sequential
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dense, Dropout
import time
def timer():
    """Return a monotonically increasing timestamp in seconds.

    The original implementation returned ``time.localtime(...)[5]`` —
    the *seconds* field of the wall-clock time — which wraps every
    minute, so elapsed-time differences (``timer() - start``) could be
    negative or nonsensical.  ``time.perf_counter()`` is monotonic with
    sub-second resolution, and callers that subtract two readings keep
    working unchanged.
    """
    return time.perf_counter()
# Synthetic, imbalanced (70/30) binary-classification data set with a
# fixed seed so runs are reproducible.
X, y = make_classification(
    n_samples=1000,
    n_classes=2,
    n_informative=4,
    weights=[0.7, 0.3],
    random_state=0,
)
Defining keras
model:
def create_model(units1, activation, dropout):
    """Build and compile a single-hidden-layer binary classifier.

    Every hyperparameter arrives as an explicit argument, so
    ``KerasClassifier`` can forward values from the hyperopt search
    space directly — the key difference from the question's version,
    which read globals.
    """
    net = Sequential()
    net.add(Dense(units1,
                  input_dim=X.shape[1],
                  kernel_initializer="glorot_uniform",
                  activation=activation))
    net.add(Dropout(dropout))
    net.add(Dense(1, activation='sigmoid'))
    net.compile(loss='binary_crossentropy',
                optimizer='adam',
                metrics=['accuracy'])
    return net
def objective(params, n_folds=2):
    """Objective function for hyperparameter optimization.

    Parameters
    ----------
    params : dict
        Sampled hyperparameters.  Keys matching ``create_model``'s
        signature configure the network; the rest (``batch_size``,
        ``epochs``) are consumed by ``KerasClassifier``/``fit``.
    n_folds : int
        Number of cross-validation folds.  The original accepted this
        argument but hard-coded ``cv=5``; it is now honoured.

    Returns
    -------
    dict
        hyperopt trial record: ``loss`` (negative mean CV accuracy,
        since fmin minimises), params, iteration counter, wall time,
        and ``STATUS_OK``.
    """
    # Keep track of evals
    global ITERATION
    ITERATION += 1

    clf = KerasClassifier(build_fn=create_model, **params)

    start = timer()
    # Perform n_folds cross validation (was hard-coded to cv=5,
    # silently ignoring the n_folds argument).
    cv_results = cross_val_score(clf, X, y,
                                 cv=n_folds,
                                 ).mean()
    run_time = timer() - start

    # Loss must be minimized
    loss = -cv_results

    # Dictionary with information for evaluation
    return {
        'loss': loss,
        'params': params,
        'iteration': ITERATION,
        'train_time': run_time,
        'status': STATUS_OK,
    }
from hyperopt import fmin, tpe, hp, Trials, STATUS_OK
# Simplified search space.  NOTE(review): the hp.choice labels
# 'dropout1' and 'nb_epochs' differ from the dict keys 'dropout' and
# 'epochs'; fmin reports its best values under the *labels*, which is
# confusing but harmless — confirm before renaming.
space = {
    'units1': hp.choice('units1', [12, 64]),
    'dropout': hp.choice('dropout1', [0.25, 0.5]),
    'batch_size': hp.choice('batch_size', [10, 20]),
    'epochs': hp.choice('nb_epochs', [2, 3]),
    'activation': 'relu',
}
# ITERATION is the trial counter mutated inside objective().
# (`global` at module level is a no-op but kept for fidelity.)
global ITERATION
ITERATION = 0

bayes_trials = Trials()

# Run 5 TPE trials with a fixed seed.
best = fmin(
    fn=objective,
    space=space,
    algo=tpe.suggest,
    max_evals=5,
    trials=bayes_trials,
    rstate=np.random.RandomState(50),
)
这篇关于如何将KerasClassifier,Hyperopt和Sklearn交叉验证放在一起的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!