

import numpy as np
import xgboost as xgb
from bayes_opt import BayesianOptimization
from sklearn import preprocessing

def xgb_evaluate(min_child_weight,
                 colsample_bytree,
                 max_depth,
                 subsample,
                 eta,
                 gamma,
                 reg_alpha):
    # Clamp each sampled hyperparameter into its valid range before running CV.
    params['min_child_weight'] = int(min_child_weight)
    params['colsample_bytree'] = max(min(colsample_bytree, 1), 0)
    params['max_depth'] = int(max_depth)
    params['subsample'] = max(min(subsample, 1), 0)
    params['eta'] = max(eta, 0)
    params['gamma'] = max(gamma, 0)
    params['reg_alpha'] = max(reg_alpha, 1)

    cv_result = xgb.cv(params, xgtrain, num_boost_round=num_rounds,
                       nfold=5,
                       seed=random_state,
                       callbacks=[xgb.callback.early_stop(50)])

    # BayesianOptimization maximizes its target, so return the negative mean MAE.
    return -cv_result['test-mae-mean'].values[-1]

def prepare_data():
    data = []
    label = []
    for line in open('./german.txt'):
        arr = line.strip().split()
        # All columns except the last are features; the last column is the class label (1/2 -> 0/1).
        data.append([float(x) for x in arr[:-1]])
        label.append(int(arr[-1]) - 1)
    X = np.asarray(data)
    X = preprocessing.normalize(X)
    Y = np.asarray(label)
    xgtrain = xgb.DMatrix(X, label=Y)
    return xgtrain

xgtrain = prepare_data()
num_rounds = 50
random_state = 2500
num_iter = 250
init_points = 600
params = {
    'eta': 0.1,
    'silent': 1,
    'eval_metric': 'mae',
    'verbose_eval': True,
    'seed': random_state,
    'gamma': 0,
    'reg_alpha': 1,
    'max_depth': 3
}
xgb_BO = BayesianOptimization(xgb_evaluate, {'min_child_weight': (1, 20),
                                             'colsample_bytree': (0.1, 1),
                                             'max_depth': (1, 150),
                                             'subsample': (0.1, 1),
                                             'eta': (0, 1),
                                             'gamma': (0, 5),
                                             'reg_alpha': (1, 100)
                                             })
xgb_BO.maximize(init_points=init_points, n_iter=num_iter)
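
Once the search finishes, the best hyperparameters can be read back from the optimizer. A minimal sketch, assuming bayes_opt >= 1.0, where the best point is exposed through the optimizer's max attribute:

# Read off the best result found by the search (assumes bayes_opt >= 1.0,
# where the optimizer exposes the best point via its `max` attribute).
best = xgb_BO.max
print('best (negative) MAE:', best['target'])
best_params = best['params']
# Integer-valued parameters come back as floats and need casting before reuse.
best_params['max_depth'] = int(best_params['max_depth'])
best_params['min_child_weight'] = int(best_params['min_child_weight'])
print('best hyperparameters:', best_params)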


 
