# -*- encoding:utf-8 -*-
# xgboost installation tutorial: http://blog.csdn.net/lht_okk/article/details/54311333
# xgboost theory: http://www.cnblogs.com/mfryf/p/6238185.html
# and http://blog.csdn.net/bryan__/article/details/52056112
# xgboost parameter-tuning notes: http://blog.csdn.net/u010414589/article/details/51153310
import xgboost as xgb
import numpy as np
# 1. Basic usage of xgboost
# 2. Custom loss function: gradient and second derivative (hessian)
# 3. binary:logistic / logitraw

# Model: f = theta * x (logistic regression on raw margin scores)
def log_reg(y_hat, y):
    # Custom objective for log loss on raw margin scores (logitraw):
    # p = sigmoid(y_hat), gradient g = p - label, hessian h = p * (1 - p)
    p = 1.0 / (1.0 + np.exp(-y_hat))
    g = p - y.get_label()
    h = p * (1.0 - p)
    return g, h

def error_rate(y_hat, y):
    # Custom evaluation metric: fraction of examples misclassified at threshold 0.5
    return 'error', float(sum(y.get_label() != (y_hat > 0.5))) / len(y_hat)

if __name__ == "__main__":
    # Read the mushroom (agaricus) data in LibSVM format into DMatrix objects
    data_train = xgb.DMatrix('14.agaricus_train.txt')
    data_test = xgb.DMatrix('14.agaricus_test.txt')
    print('data_train')
    print(data_train)
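
    # A minimal training sketch. The hyperparameter values and the number of
    # boosting rounds below are illustrative assumptions, not values taken from
    # the original post. The custom objective log_reg and the custom metric
    # error_rate defined above are plugged in via obj= and feval=.
    param = {'max_depth': 3, 'eta': 1}           # assumed hyperparameters
    watchlist = [(data_test, 'eval'), (data_train, 'train')]
    n_round = 3                                   # assumed number of boosting rounds
    bst = xgb.train(param, data_train, num_boost_round=n_round,
                    evals=watchlist, obj=log_reg, feval=error_rate)

    # With a custom objective, predictions are raw margins, so map them
    # through the sigmoid before thresholding at 0.5.
    y_hat = bst.predict(data_test)
    y = data_test.get_label()
    p = 1.0 / (1.0 + np.exp(-y_hat))
    print('test error rate: %.4f' % np.mean((p > 0.5) != y))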