This is how I trained an XGBoost classifier with 5-fold cross-validation, using randomized search to optimize the hyperparameters for the F1 score:
import numpy as np
import xgboost as xgb
from scipy import stats
from sklearn.metrics import f1_score
from sklearn.model_selection import KFold, RandomizedSearchCV

clf_xgb = xgb.XGBClassifier(objective='binary:logistic')

# Search distributions: stats.uniform(loc, scale) samples from [loc, loc + scale]
param_dist = {'n_estimators': stats.randint(150, 500),
              'learning_rate': stats.uniform(0.01, 0.07),
              'subsample': stats.uniform(0.3, 0.7),
              'max_depth': [3, 4, 5, 6, 7, 8, 9],
              'colsample_bytree': stats.uniform(0.5, 0.45),
              'min_child_weight': [1, 2, 3]
              }

# 25 random parameter draws, each scored by F1 on the search's internal CV splits
clf = RandomizedSearchCV(clf_xgb, param_distributions=param_dist, n_iter=25, scoring='f1', error_score=0, verbose=3, n_jobs=-1)

numFolds = 5
folds = KFold(n_splits=numFolds, shuffle=True)  # sklearn.cross_validation is gone; use model_selection

estimators = []             # best estimator found on each outer fold
results = np.zeros(len(X))  # out-of-fold predictions for every sample
score = 0.0
for train_index, test_index in folds.split(X):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
    clf.fit(X_train, y_train)  # run the randomized search on this fold's training data
    estimators.append(clf.best_estimator_)
    results[test_index] = clf.predict(X_test)
    score += f1_score(y_test, results[test_index])
score /= numFolds  # average F1 over the outer folds
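
If you only need the averaged score, scikit-learn can run the outer loop for you. Here is a minimal sketch of the equivalent nested cross-validation using cross_val_score, assuming X, y, and the clf search object from the snippet above (the variable names outer_cv and nested_f1 are just for illustration):

from sklearn.model_selection import KFold, cross_val_score

# The outer KFold evaluates the tuned model; RandomizedSearchCV's internal
# CV picks the hyperparameters on each outer training split (nested CV).
outer_cv = KFold(n_splits=5, shuffle=True)
nested_f1 = cross_val_score(clf, X, y, scoring='f1', cv=outer_cv)
print('Mean F1 across outer folds: %.4f' % nested_f1.mean())

Either way, the out-of-fold predictions collected in results also let you compute a single overall F1 with f1_score(y, results), which weights every sample equally instead of averaging per-fold scores.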