Open nitin0301 opened 4 years ago
Hey, try using this for MLPRegressor!
from hpsklearn import HyperoptEstimator, extra_trees
from sklearn.datasets import load_boston
from hyperopt import tpe
import numpy as np
from sklearn.neural_network import MLPRegressor
import hyperopt
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
from sklearn.metrics import r2_score
# Load the Boston housing data and build a reproducible 80/20 split,
# then define the hyperopt search space for the MLPRegressor.
# NOTE(review): load_boston is deprecated and removed in scikit-learn >= 1.2;
# newer versions will need a replacement dataset — confirm before upgrading.
data = load_boston()
X, y = data.data, data.target

# Deterministic shuffle-and-split: the last 20% of the permutation is the test set.
np.random.seed(13)
test_size = int(0.2 * len(y))
indices = np.random.permutation(len(X))
split_at = len(X) - test_size
X_train, X_test = X[indices[:split_at]], X[indices[split_at:]]
y_train, y_test = y[indices[:split_at]], y[indices[split_at:]]

# Search space: layer layout and activation vary; max_iter is fixed at 3000
# (wrapping it in a single-option hp.choice keeps it visible in `best`).
space = {
    'hidden_layer_sizes': hp.choice('hidden_layer_sizes',
                                    [8, 16, 32, (8, 8), (16, 16)]),
    'activation': hp.choice('activation', ['relu', 'tanh']),
    'max_iter': hp.choice('max_iter', [3000]),
}
def hyperparameter_tuning(space):
    """Objective function for hyperopt's fmin.

    Fits an MLPRegressor with one sampled hyperparameter configuration
    on the module-level train split and scores it on the test split.

    Parameters
    ----------
    space : dict
        A sample from the search space with keys 'hidden_layer_sizes',
        'activation', and 'max_iter'.

    Returns
    -------
    dict
        hyperopt result: 'loss' is 1 - R^2 (minimizing it maximizes R^2),
        'status' is STATUS_OK, and 'model' is the fitted estimator.
    """
    model = MLPRegressor(
        hidden_layer_sizes=space['hidden_layer_sizes'],
        max_iter=int(space['max_iter']),
        activation=space['activation'],
    )
    # Fix: removed the unused `evaluation` list the original built here.
    model.fit(X_train, y_train)
    pred = model.predict(X_test)
    r2 = r2_score(y_test, pred)
    print("SCORE:", r2)
    return {'loss': 1 - r2, 'status': STATUS_OK, 'model': model}
# Run the TPE search over the space; `trials` records every evaluation
# (including the fitted model returned by the objective).
trials = Trials()
best = fmin(
    fn=hyperparameter_tuning,
    space=space,
    algo=tpe.suggest,
    max_evals=50,
    trials=trials,
)
# Bare expression: echoes the best hyperparameter indices in a notebook/REPL.
best
Could you please add MLPRegressor and BayesianRidge regressor to the search space? Thanks.