# 接上篇 (continued from the previous post): http://blog.csdn.net/mmc2015/article/details/47304591
def xgboost_pred(train,labels,test):
params = {}
params["objective"] = "reg:linear"
params["eta"] = 0.005
params["min_child_weight"] = 6
params["subsample"] = 0.7
params["colsample_bytree"] = 0.7
params["scale_pos_weight"] = 1
params["silent"] = 1
params["max_depth"] = 9
plst = list(params.items())
#Using 5000 rows for early stopping.
offset = 4000
num_rounds = 10000
xgtest = xgb.DMatrix(test)
#create a train and validation dmatrices
xgtrain = xgb.DMatrix(train[offset:,:], label=labels[offset:])
xgval = xgb.DMatrix(train[:offset,:], label=labels[:offset])
#trai