import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import (RandomTreesEmbedding, RandomForestClassifier,GradientBoostingClassifier)
from sklearn.preprocessing import OneHotEncoder
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_curve
from sklearn.pipeline import make_pipeline
from sklearn.metrics import roc_auc_score
# Load the per-user aggregated features and the target flags, then join on USRID.
train_agg, train_flg, test_agg, test_flg = (
    pd.read_csv(f'data/{name}.csv', sep='\t')
    for name in ('train_agg', 'train_flg', 'test_agg', 'test_flg')
)
print(train_agg.shape, train_flg.shape, test_agg.shape, test_flg.shape)

train_df = train_agg.merge(train_flg, on='USRID', how='left')
test_df = test_agg.merge(test_flg, on='USRID', how='left')
print(train_df.shape, train_df.columns)

# Features = all but the last three columns; label = last column.
# NOTE(review): assumes the merged frame ends with three non-feature columns — confirm schema.
data = train_df.iloc[:, :-3]
flag = train_df.iloc[:, -1]
test_data = test_df.iloc[:, :-3]
test_flag = test_df.iloc[:, -1]

# Baseline: plain logistic regression on the raw features, scored by AUC.
basic_lm = LogisticRegression()
basic_lm.fit(data, flag)
y_pred_basic_lm = basic_lm.predict_proba(test_data)[:, 1]
auc_basic_lm = roc_auc_score(test_flag, y_pred_basic_lm)
print("basic lm->auc:", auc_basic_lm)
'''
NET+LR
'''
from keras.models import Sequential
from keras.layers import Dense,Activation
from keras.models import Model
from keras.datasets import reuters

# Small feed-forward net: 29 input features -> 25 -> 15 -> 1 (sigmoid).
# The hidden layers are named so they can be pulled out as feature extractors.
model = Sequential([
    Dense(25, activation='relu', input_dim=29),
    Dense(15, activation='relu', name="Dense_1"),
    Dense(1, activation='sigmoid', name="Dense_2"),
])
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])
model.fit(data, flag, epochs=15, batch_size=1000)

# After the model is trained, build a functional-API model whose output is
# the "Dense_1" hidden layer — i.e. use the net as a learned feature extractor.
dense1_layer_model = Model(inputs=model.input, outputs=model.get_layer('Dense_1').output)

# The extractor's predictions become the new feature matrices.
dense1_train_output = dense1_layer_model.predict(data)
dense1_test_output = dense1_layer_model.predict(test_data)
print(dense1_train_output.shape, dense1_test_output.shape)

# Logistic regression on top of the learned 15-dim representation.
net_lm = LogisticRegression()
net_lm.fit(dense1_train_output, flag)
y_pred_net_lm = net_lm.predict_proba(dense1_test_output)[:, 1]
auc_net_lm = roc_auc_score(test_flag, y_pred_net_lm)
print("net+lm->auc:", auc_net_lm)
'''
GBDT+LR
'''
# Stacked model: GBDT leaf indices -> one-hot encoding -> logistic regression.
n_estimator = 100
grd = GradientBoostingClassifier(n_estimators=n_estimator)
grd.fit(data, flag)

# apply() yields (n_samples, n_estimators, 1) leaf indices; drop the trailing axis.
train_leaves = grd.apply(data)[:, :, 0]
test_leaves = grd.apply(test_data)[:, :, 0]

grd_enc = OneHotEncoder()
grd_enc.fit(train_leaves)

grd_lm = LogisticRegression()
grd_lm.fit(grd_enc.transform(train_leaves), flag)

y_pred_grd_lm = grd_lm.predict_proba(grd_enc.transform(test_leaves))[:, 1]
auc_grd_lm = roc_auc_score(test_flag, y_pred_grd_lm)
print("gbdt+lm->auc:", auc_grd_lm)
'''
XGB+LR
'''
# Stacked model: XGBoost leaf indices -> one-hot encoding -> logistic regression.
XGB = xgb.XGBClassifier(nthread=4, learning_rate=0.08, n_estimators=100, colsample_bytree=0.5)
XGB.fit(data, flag)

OHE = OneHotEncoder()
OHE.fit(XGB.apply(data))

# FIX: penalty='l1' requires a solver that supports it; modern scikit-learn's
# lbfgs default raises ValueError. 'liblinear' was the historical default and
# supports l1, so behavior matches the original code's era.
LR = LogisticRegression(n_jobs=4, C=0.1, penalty='l1', solver='liblinear')
LR.fit(OHE.transform(XGB.apply(data)), flag)

Y_pred_xgb_lm = LR.predict_proba(OHE.transform(XGB.apply(test_data)))[:, 1]
auc_xgb_lm = roc_auc_score(test_flag, Y_pred_xgb_lm)
print("xgb+lm->auc:", auc_xgb_lm)
# FIX: the original printed the undefined name `auc` (NameError at runtime);
# the value intended here is the XGB+LR score just computed.
print('XGBoost + LogisticRegression: ', auc_xgb_lm)
# Generate new features with GBDT, XGBoost, and a neural network.
# (Scraped blog footer, kept as comments — bare prose is a Python SyntaxError.)
# Original title: 用GBDT、XGboost、神经网络生成新的特征
# Latest recommended article published 2024-06-21 13:22:25.