# Financial Anti-Fraud

## 1. Data
Lending Club 2016 Q3 loan data: https://www.lendingclub.com/info/download-data.action (reference: http://kldavenport.com/lending-club-data-analysis-revisted-with-python/)
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
df = pd.read_csv("./LoanStats_2016Q3.csv",skiprows=1,low_memory=False)
df.info()
df.head(3)
| | id | member_id | loan_amnt | funded_amnt | funded_amnt_inv | term | int_rate | installment | grade | sub_grade | … |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | NaN | NaN | 15000.0 | 15000.0 | 15000.0 | 36 months | 13.99% | 512.60 | C | C3 | … |
| 1 | NaN | NaN | 2600.0 | 2600.0 | 2600.0 | 36 months | 8.99% | 82.67 | B | B1 | … |
| 2 | NaN | NaN | 32200.0 | 32200.0 | 32200.0 | 60 months | 21.49% | 880.02 | D | D5 | … |

(The trailing sec_app_* columns shown in the truncated preview, sec_app_earliest_cr_line through sec_app_mths_since_last_major_derog, are all NaN.)

3 rows × 122 columns

## 2. Keep what we need
# .ix[row slice, column slice]  (.ix is deprecated in newer pandas; .iloc is the positional equivalent)
df.ix[:4,:7]
| | id | member_id | loan_amnt | funded_amnt | funded_amnt_inv | term | int_rate |
|---|---|---|---|---|---|---|---|
| 0 | NaN | NaN | 15000.0 | 15000.0 | 15000.0 | 36 months | 13.99% |
| 1 | NaN | NaN | 2600.0 | 2600.0 | 2600.0 | 36 months | 8.99% |
| 2 | NaN | NaN | 32200.0 | 32200.0 | 32200.0 | 60 months | 21.49% |
| 3 | NaN | NaN | 10000.0 | 10000.0 | 10000.0 | 36 months | 11.49% |
| 4 | NaN | NaN | 6000.0 | 6000.0 | 6000.0 | 36 months | 13.49% |
df.drop('id',1,inplace=True)
df.drop('member_id',1,inplace=True)
df.int_rate = pd.Series(df.int_rate).str.replace('%', '').astype(float)
df.ix[:4,:7]
| | loan_amnt | funded_amnt | funded_amnt_inv | term | int_rate | installment | grade |
|---|---|---|---|---|---|---|---|
| 0 | 15000.0 | 15000.0 | 15000.0 | 36 months | 13.99 | 512.60 | C |
| 1 | 2600.0 | 2600.0 | 2600.0 | 36 months | 8.99 | 82.67 | B |
| 2 | 32200.0 | 32200.0 | 32200.0 | 60 months | 21.49 | 880.02 | D |
| 3 | 10000.0 | 10000.0 | 10000.0 | 36 months | 11.49 | 329.72 | B |
| 4 | 6000.0 | 6000.0 | 6000.0 | 36 months | 13.49 | 203.59 | C |
### Loan Amount Requested Versus the Funded Amount
print (df.loan_amnt != df.funded_amnt).value_counts()
False    99120
True         4
dtype: int64
df.query('loan_amnt != funded_amnt').head(5)
| | loan_amnt | funded_amnt | funded_amnt_inv | term | int_rate | installment | grade | sub_grade | emp_title | emp_length | … |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 99120 | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | … |
| 99121 | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | … |
| 99122 | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | … |
| 99123 | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | … |

4 rows × 120 columns

These four rows are entirely NaN (trailer lines at the bottom of the CSV), which is why `loan_amnt != funded_amnt` evaluates to True for them: NaN never compares equal to NaN. The next step drops such all-NaN rows.

df.dropna(axis=0, how='all',inplace=True)
df.info()
df.dropna(axis=1, how='all',inplace=True)
df.info()
df.ix[:5,8:15]
| | emp_title | emp_length | home_ownership | annual_inc | verification_status | issue_d | loan_status |
|---|---|---|---|---|---|---|---|
| 0 | Fiscal Director | 2 years | RENT | 55000.0 | Not Verified | Sep-16 | Current |
| 1 | Loaner Coordinator | 3 years | RENT | 35000.0 | Source Verified | Sep-16 | Fully Paid |
| 2 | warehouse/supervisor | 10+ years | MORTGAGE | 65000.0 | Not Verified | Sep-16 | Fully Paid |
| 3 | Teacher | 10+ years | OWN | 55900.0 | Not Verified | Sep-16 | Current |
| 4 | SERVICE MGR | 5 years | RENT | 33000.0 | Not Verified | Sep-16 | Current |
| 5 | General Manager | 10+ years | MORTGAGE | 109000.0 | Source Verified | Sep-16 | Current |
### emp_title: employment title
print df.emp_title.value_counts().head()
print df.emp_title.value_counts().tail()
df.emp_title.unique().shape
Teacher       1931
Manager       1701
Owner          990
Supervisor     785
Driver         756
Name: emp_title, dtype: int64
Agent Services Representative           1
Operator Bridge Tunnel                  1
Reg Medical Assistant/Referral Spec.    1
Home Health Care                        1
rounds cook                             1
Name: emp_title, dtype: int64
(37421,)
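With 37,421 distinct values, emp_title is far too high-cardinality to one-hot encode, so it is simply dropped below. A hedged alternative sketch (not used in this pipeline; the cutoff of 20 titles is an arbitrary assumption): keep the most frequent titles and bucket everything else as "Other".

```python
# Sketch only: bucket rare job titles instead of dropping the column entirely.
top_titles = df.emp_title.value_counts().nlargest(20).index          # arbitrary cutoff
emp_title_bucket = df.emp_title.where(df.emp_title.isin(top_titles), 'Other')
print(emp_title_bucket.value_counts().head())
```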
df.drop(['emp_title'],1, inplace=True)
df.ix[:5,8:15]
| | emp_length | home_ownership | annual_inc | verification_status | issue_d | loan_status | pymnt_plan |
|---|---|---|---|---|---|---|---|
| 0 | 2 years | RENT | 55000.0 | Not Verified | Sep-16 | Current | n |
| 1 | 3 years | RENT | 35000.0 | Source Verified | Sep-16 | Fully Paid | n |
| 2 | 10+ years | MORTGAGE | 65000.0 | Not Verified | Sep-16 | Fully Paid | n |
| 3 | 10+ years | OWN | 55900.0 | Not Verified | Sep-16 | Current | n |
| 4 | 5 years | RENT | 33000.0 | Not Verified | Sep-16 | Current | n |
| 5 | 10+ years | MORTGAGE | 109000.0 | Source Verified | Sep-16 | Current | n |
### emp_length: employment length
df.emp_length.value_counts()
10+ years    34219
2 years       9066
3 years       7925
df.replace('n/a', np.nan,inplace=True)
df.emp_length.fillna(value=0,inplace=True)
# keep only the digits: '10+ years' -> 10, '< 1 year' and '1 year' both -> 1, missing (filled with 0 above) stays 0
df['emp_length'].replace(to_replace='[^0-9]+', value='', inplace=True, regex=True)
df['emp_length'] = df['emp_length'].astype(int)
df.emp_length.value_counts()
10    34219
1     14095
2      9066
3      7925
5      6170
4      6022
0      5922
6      4406
8      4168
9      3922
7      3205
Name: emp_length, dtype: int64

### verification_status: "Indicates if income was verified by LC, not verified, or if the income source was verified"
df.verification_status.value_counts()
Source Verified    40781
Verified           31356
Not Verified       26983
Name: verification_status, dtype: int64

### Target: Loan Statuses
df.info()
df.columns
Index([u'loan_amnt', u'funded_amnt', u'funded_amnt_inv', u'term', u'int_rate',
       u'installment', u'grade', u'sub_grade', u'emp_length', u'home_ownership',
       ...
       u'num_tl_90g_dpd_24m', u'num_tl_op_past_12m', u'pct_tl_nvr_dlq',
       u'percent_bc_gt_75', u'pub_rec_bankruptcies', u'tax_liens',
       u'tot_hi_cred_lim', u'total_bal_ex_mort', u'total_bc_limit',
       u'total_il_high_credit_limit'],
      dtype='object', length=107)
pd.unique(df['loan_status'].values.ravel())
array(['Current', 'Fully Paid', 'Late (31-120 days)', 'Charged Off',
       'Late (16-30 days)', 'In Grace Period', 'Default'], dtype=object)
for col in df.select_dtypes(include=['object']).columns:
    print ("Column {} has {} unique instances".format( col, len(df[col].unique())) )
Column term has 2 unique instances
Column grade has 7 unique instances
Column sub_grade has 35 unique instances
Column home_ownership has 4 unique instances
Column verification_status has 3 unique instances
Column issue_d has 3 unique instances
Column loan_status has 7 unique instances
Column pymnt_plan has 2 unique instances
Column desc has 6 unique instances
Column purpose has 13 unique instances
Column title has 13 unique instances
Column zip_code has 873 unique instances
Column addr_state has 50 unique instances
Column earliest_cr_line has 614 unique instances
Column revol_util has 1087 unique instances
Column initial_list_status has 2 unique instances
Column last_pymnt_d has 13 unique instances
Column next_pymnt_d has 4 unique instances
Column last_credit_pull_d has 14 unique instances
Column application_type has 3 unique instances
Column verification_status_joint has 2 unique instances
# Missing-value percentage and unique counts for the object-type columns
df.select_dtypes(include=['O']).describe().T.\
assign(missing_pct=df.apply(lambda x : (len(x)-x.count())/float(len(x))))
| | count | unique | top | freq | missing_pct |
|---|---|---|---|---|---|
| term | 99120 | 2 | 36 months | 73898 | 0.000000 |
| grade | 99120 | 7 | C | 32846 | 0.000000 |
| sub_grade | 99120 | 35 | B5 | 8322 | 0.000000 |
| home_ownership | 99120 | 4 | MORTGAGE | 46761 | 0.000000 |
| verification_status | 99120 | 3 | Source Verified | 40781 | 0.000000 |
| issue_d | 99120 | 3 | Aug-16 | 36280 | 0.000000 |
| loan_status | 99120 | 7 | Current | 79445 | 0.000000 |
| pymnt_plan | 99120 | 2 | n | 99074 | 0.000000 |
| desc | 6 | 5 | | 2 | 0.999939 |
| purpose | 99120 | 13 | debt_consolidation | 57682 | 0.000000 |
| title | 93693 | 12 | Debt consolidation | 53999 | 0.054752 |
| zip_code | 99120 | 873 | 112xx | 1125 | 0.000000 |
| addr_state | 99120 | 50 | CA | 13352 | 0.000000 |
| earliest_cr_line | 99120 | 614 | Aug-03 | 796 | 0.000000 |
| revol_util | 99060 | 1086 | 0% | 440 | 0.000605 |
| initial_list_status | 99120 | 2 | w | 71869 | 0.000000 |
| last_pymnt_d | 98991 | 12 | Jun-17 | 81082 | 0.001301 |
| next_pymnt_d | 83552 | 3 | Jul-17 | 83527 | 0.157062 |
| last_credit_pull_d | 99115 | 13 | Jun-17 | 89280 | 0.000050 |
| application_type | 99120 | 3 | INDIVIDUAL | 98565 | 0.000000 |
| verification_status_joint | 517 | 1 | Not Verified | 517 | 0.994784 |
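The table above motivates the manual drops below: desc and verification_status_joint are almost entirely missing, and several other columns are identifiers or free text. As a hedged convenience, the mostly-missing columns could also be selected by threshold; a minimal sketch (the 0.9 cutoff is an arbitrary assumption):

```python
# Sketch only: list columns whose missing fraction exceeds a threshold.
missing_pct = df.isnull().mean()                       # fraction of NaN per column
too_sparse = missing_pct[missing_pct > 0.9].index.tolist()
print(too_sparse)                                      # should include 'desc' and 'verification_status_joint'
# df.drop(too_sparse, axis=1, inplace=True)            # equivalent to the manual drops for these columns
```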
df.revol_util = pd.Series(df.revol_util).str.replace('%', '').astype(float)
# Drop columns that are mostly missing (see missing_pct above), identifiers, or free text
df.drop('desc',1,inplace=True)
df.drop('verification_status_joint',1,inplace=True)
df.drop('zip_code',1,inplace=True)
df.drop('addr_state',1,inplace=True)
df.drop('earliest_cr_line',1,inplace=True)
df.drop('revol_util',1,inplace=True)
df.drop('purpose',1,inplace=True)
df.drop('title',1,inplace=True)
df.drop('term',1,inplace=True)
df.drop('issue_d',1,inplace=True)
# df.drop('',1,inplace=True)
# Post-loan fields (information that only exists after the loan is issued)
df.drop(['out_prncp','out_prncp_inv','total_pymnt',
         'total_pymnt_inv','total_rec_prncp', 'grade', 'sub_grade'] ,1, inplace=True)
df.drop(['total_rec_int','total_rec_late_fee',
         'recoveries','collection_recovery_fee'],1, inplace=True)
df.drop(['last_pymnt_d','last_pymnt_amnt',
         'next_pymnt_d','last_credit_pull_d'],1, inplace=True)
df.drop(['policy_code'],1, inplace=True)
df.info()
df.ix[:5,:10]
| | loan_amnt | funded_amnt | funded_amnt_inv | int_rate | installment | emp_length | home_ownership | annual_inc | verification_status | loan_status |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 15000.0 | 15000.0 | 15000.0 | 13.99 | 512.60 | 2 | RENT | 55000.0 | Not Verified | Current |
| 1 | 2600.0 | 2600.0 | 2600.0 | 8.99 | 82.67 | 3 | RENT | 35000.0 | Source Verified | Fully Paid |
| 2 | 32200.0 | 32200.0 | 32200.0 | 21.49 | 880.02 | 10 | MORTGAGE | 65000.0 | Not Verified | Fully Paid |
| 3 | 10000.0 | 10000.0 | 10000.0 | 11.49 | 329.72 | 10 | OWN | 55900.0 | Not Verified | Current |
| 4 | 6000.0 | 6000.0 | 6000.0 | 13.49 | 203.59 | 5 | RENT | 33000.0 | Not Verified | Current |
| 5 | 30000.0 | 30000.0 | 30000.0 | 13.99 | 697.90 | 10 | MORTGAGE | 109000.0 | Source Verified | Current |
df.ix[:5,10:21]
| | pymnt_plan | dti | delinq_2yrs | inq_last_6mths | mths_since_last_delinq | mths_since_last_record | open_acc | pub_rec | revol_bal | total_acc | initial_list_status |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | n | 23.78 | 1.0 | 0.0 | 7.0 | NaN | 22.0 | 0.0 | 21345.0 | 43.0 | f |
| 1 | n | 6.73 | 0.0 | 0.0 | NaN | NaN | 14.0 | 0.0 | 720.0 | 24.0 | w |
| 2 | n | 11.71 | 0.0 | 1.0 | NaN | 87.0 | 17.0 | 1.0 | 11987.0 | 34.0 | w |
| 3 | n | 26.21 | 0.0 | 2.0 | NaN | NaN | 15.0 | 0.0 | 17209.0 | 62.0 | w |
| 4 | n | 19.05 | 0.0 | 0.0 | NaN | NaN | 3.0 | 0.0 | 4576.0 | 11.0 | f |
| 5 | n | 16.24 | 0.0 | 0.0 | NaN | NaN | 17.0 | 0.0 | 11337.0 | 39.0 | w |
print df.columns
print df.head(1).values
df.info()
Index([u'loan_amnt', u'funded_amnt', u'funded_amnt_inv', u'int_rate',
       u'installment', u'emp_length', u'home_ownership', u'annual_inc',
       u'verification_status', u'loan_status', u'pymnt_plan', u'dti',
       u'delinq_2yrs', u'inq_last_6mths', u'mths_since_last_delinq',
       u'mths_since_last_record', u'open_acc', u'pub_rec', u'revol_bal',
       u'total_acc', u'initial_list_status', u'collections_12_mths_ex_med',
       u'mths_since_last_major_derog', u'application_type', u'annual_inc_joint',
       u'dti_joint', u'acc_now_delinq', u'tot_coll_amt', u'tot_cur_bal',
       u'open_acc_6m', u'open_il_6m', u'open_il_12m', u'open_il_24m',
       u'mths_since_rcnt_il', u'total_bal_il', u'il_util', u'open_rv_12m',
       u'open_rv_24m', u'max_bal_bc', u'all_util', u'total_rev_hi_lim',
       u'inq_fi', u'total_cu_tl', u'inq_last_12m', u'acc_open_past_24mths',
       u'avg_cur_bal', u'bc_open_to_buy', u'bc_util',
       u'chargeoff_within_12_mths', u'delinq_amnt', u'mo_sin_old_il_acct',
       u'mo_sin_old_rev_tl_op', u'mo_sin_rcnt_rev_tl_op', u'mo_sin_rcnt_tl',
       u'mort_acc', u'mths_since_recent_bc', u'mths_since_recent_bc_dlq',
       u'mths_since_recent_inq', u'mths_since_recent_revol_delinq',
       u'num_accts_ever_120_pd', u'num_actv_bc_tl', u'num_actv_rev_tl',
       u'num_bc_sats', u'num_bc_tl', u'num_il_tl', u'num_op_rev_tl',
       u'num_rev_accts', u'num_rev_tl_bal_gt_0', u'num_sats',
       u'num_tl_120dpd_2m', u'num_tl_30dpd', u'num_tl_90g_dpd_24m',
       u'num_tl_op_past_12m', u'pct_tl_nvr_dlq', u'percent_bc_gt_75',
       u'pub_rec_bankruptcies', u'tax_liens', u'tot_hi_cred_lim',
       u'total_bal_ex_mort', u'total_bc_limit', u'total_il_high_credit_limit'],
      dtype='object')
[[15000.0 15000.0 15000.0 13.99 512.6 2 'RENT' 55000.0 'Not Verified'
  'Current' 'n' 23.78 1.0 0.0 7.0 nan 22.0 0.0 21345.0 43.0 'f' 0.0 nan
  'INDIVIDUAL' nan nan 0.0 0.0 140492.0 3.0 10.0 2.0 3.0 11.0 119147.0
  101.0 3.0 4.0 14612.0 83.0 39000.0 1.0 6.0 0.0 7.0 6386.0 9645.0 73.1
  0.0 0.0 157.0 248.0 4.0 4.0 0.0 4.0 7.0 22.0 7.0 0.0 5.0 9.0 6.0 7.0
  25.0 11.0 18.0 9.0 22.0 0.0 0.0 0.0 5.0 100.0 33.3 0.0 0.0 147587.0
  140492.0 30200.0 108587.0]]
df.select_dtypes(include=['float']).describe().T.\
assign(missing_pct=df.apply(lambda x : (len(x)-x.count())/float(len(x))))
/Users/ting/anaconda/lib/python2.7/site-packages/numpy/lib/function_base.py:3834: RuntimeWarning: Invalid value encountered in percentile RuntimeWarning)
| | count | mean | std | min | 25% | 50% | 75% | max | missing_pct |
|---|---|---|---|---|---|---|---|---|---|
| loan_amnt | 99120.0 | 14170.570521 | 8886.138758 | 1000.00 | 7200.00 | 12000.00 | 20000.00 | 40000.00 | 0.000000 |
| funded_amnt | 99120.0 | 14170.570521 | 8886.138758 | 1000.00 | 7200.00 | 12000.00 | 20000.00 | 40000.00 | 0.000000 |
| funded_amnt_inv | 99120.0 | 14166.087823 | 8883.301328 | 1000.00 | 7200.00 | 12000.00 | 20000.00 | 40000.00 | 0.000000 |
| int_rate | 99120.0 | 13.723641 | 4.873910 | 5.32 | 10.49 | 12.79 | 15.59 | 30.99 | 0.000000 |
| installment | 99120.0 | 432.718654 | 272.678596 | 30.12 | 235.24 | 361.38 | 569.83 | 1535.71 | 0.000000 |
| annual_inc | 99120.0 | 78488.850081 | 72694.186060 | 0.00 | 48000.00 | 65448.00 | 94000.00 | 8400000.00 | 0.000000 |
| dti | 99120.0 | 18.348651 | 64.057603 | 0.00 | 11.91 | 17.60 | 23.90 | 9999.00 | 0.000000 |
| delinq_2yrs | 99120.0 | 0.381901 | 0.988996 | 0.00 | 0.00 | 0.00 | 0.00 | 21.00 | 0.000000 |
| inq_last_6mths | 99120.0 | 0.570521 | 0.863796 | 0.00 | 0.00 | 0.00 | 1.00 | 5.00 | 0.000000 |
| mths_since_last_delinq | 53366.0 | 33.229172 | 21.820407 | 0.00 | NaN | NaN | NaN | 142.00 | 0.461602 |
| mths_since_last_record | 19792.0 | 67.267886 | 24.379343 | 0.00 | NaN | NaN | NaN | 119.00 | 0.800323 |
| open_acc | 99120.0 | 11.718251 | 5.730585 | 1.00 | 8.00 | 11.00 | 15.00 | 86.00 | 0.000000 |
| pub_rec | 99120.0 | 0.266596 | 0.719193 | 0.00 | 0.00 | 0.00 | 0.00 | 61.00 | 0.000000 |
| revol_bal | 99120.0 | 15536.628047 | 21537.790599 | 0.00 | 5657.00 | 10494.00 | 18501.50 | 876178.00 | 0.000000 |
| total_acc | 99120.0 | 24.033545 | 11.929761 | 2.00 | 15.00 | 22.00 | 31.00 | 119.00 | 0.000000 |
| collections_12_mths_ex_med | 99120.0 | 0.021640 | 0.168331 | 0.00 | 0.00 | 0.00 | 0.00 | 10.00 | 0.000000 |
| mths_since_last_major_derog | 29372.0 | 44.449612 | 22.254529 | 0.00 | NaN | NaN | NaN | 165.00 | 0.703672 |
| annual_inc_joint | 517.0 | 118120.418472 | 51131.323819 | 26943.12 | NaN | NaN | NaN | 400000.00 | 0.994784 |
| dti_joint | 517.0 | 18.637621 | 6.602016 | 2.56 | NaN | NaN | NaN | 48.58 | 0.994784 |
| acc_now_delinq | 99120.0 | 0.006709 | 0.086902 | 0.00 | 0.00 | 0.00 | 0.00 | 4.00 | 0.000000 |
| tot_coll_amt | 99120.0 | 281.797639 | 1840.699443 | 0.00 | 0.00 | 0.00 | 0.00 | 172575.00 | 0.000000 |
| tot_cur_bal | 99120.0 | 138845.606144 | 156736.843591 | 0.00 | 28689.00 | 76447.50 | 207194.75 | 3764968.00 | 0.000000 |
| open_acc_6m | 99120.0 | 0.978743 | 1.176973 | 0.00 | 0.00 | 1.00 | 2.00 | 13.00 | 0.000000 |
| open_il_6m | 99120.0 | 2.825888 | 3.109225 | 0.00 | 1.00 | 2.00 | 3.00 | 43.00 | 0.000000 |
| open_il_12m | 99120.0 | 0.723467 | 0.973888 | 0.00 | 0.00 | 0.00 | 1.00 | 13.00 | 0.000000 |
| open_il_24m | 99120.0 | 1.624818 | 1.656628 | 0.00 | 0.00 | 1.00 | 2.00 | 26.00 | 0.000000 |
| mths_since_rcnt_il | 96469.0 | 21.362531 | 26.563455 | 0.00 | NaN | NaN | NaN | 503.00 | 0.026745 |
| total_bal_il | 99120.0 | 35045.324193 | 41981.617996 | 0.00 | 9179.00 | 23199.00 | 45672.00 | 1547285.00 | 0.000000 |
| il_util | 85480.0 | 71.599158 | 23.306731 | 0.00 | NaN | NaN | NaN | 1000.00 | 0.137611 |
| open_rv_12m | 99120.0 | 1.408142 | 1.570068 | 0.00 | 0.00 | 1.00 | 2.00 | 24.00 | 0.000000 |
| mo_sin_old_rev_tl_op | 99120.0 | 177.634322 | 95.327498 | 3.00 | 115.00 | 160.00 | 227.00 | 901.00 | 0.000000 |
| mo_sin_rcnt_rev_tl_op | 99120.0 | 13.145369 | 16.695022 | 0.00 | 3.00 | 8.00 | 16.00 | 274.00 | 0.000000 |
| mo_sin_rcnt_tl | 99120.0 | 7.833232 | 8.649843 | 0.00 | 3.00 | 5.00 | 10.00 | 268.00 | 0.000000 |
| mort_acc | 99120.0 | 1.467585 | 1.799513 | 0.00 | 0.00 | 1.00 | 2.00 | 45.00 | 0.000000 |
| mths_since_recent_bc | 98067.0 | 23.623512 | 31.750632 | 0.00 | NaN | NaN | NaN | 546.00 | 0.010623 |
| mths_since_recent_bc_dlq | 26018.0 | 38.095280 | 22.798229 | 0.00 | NaN | NaN | NaN | 162.00 | 0.737510 |
| mths_since_recent_inq | 89254.0 | 6.626504 | 5.967648 | 0.00 | NaN | NaN | NaN | 25.00 | 0.099536 |
| mths_since_recent_revol_delinq | 36606.0 | 34.393132 | 22.371813 | 0.00 | NaN | NaN | NaN | 165.00 | 0.630690 |
| num_accts_ever_120_pd | 99120.0 | 0.594703 | 1.508027 | 0.00 | 0.00 | 0.00 | 1.00 | 36.00 | 0.000000 |
| num_actv_bc_tl | 99120.0 | 3.628218 | 2.302668 | 0.00 | 2.00 | 3.00 | 5.00 | 47.00 | 0.000000 |
| num_actv_rev_tl | 99120.0 | 5.625272 | 3.400185 | 0.00 | 3.00 | 5.00 | 7.00 | 59.00 | 0.000000 |
| num_bc_sats | 99120.0 | 4.645581 | 3.013399 | 0.00 | 3.00 | 4.00 | 6.00 | 61.00 | 0.000000 |
| num_bc_tl | 99120.0 | 7.416041 | 4.546112 | 0.00 | 4.00 | 7.00 | 10.00 | 67.00 | 0.000000 |
| num_il_tl | 99120.0 | 8.597437 | 7.528533 | 0.00 | 4.00 | 7.00 | 11.00 | 107.00 | 0.000000 |
| num_op_rev_tl | 99120.0 | 8.198820 | 4.710348 | 0.00 | 5.00 | 7.00 | 10.00 | 79.00 | 0.000000 |
| num_rev_accts | 99120.0 | 13.726312 | 7.963791 | 2.00 | 8.00 | 12.00 | 18.00 | 104.00 | 0.000000 |
| num_rev_tl_bal_gt_0 | 99120.0 | 5.566293 | 3.286135 | 0.00 | 3.00 | 5.00 | 7.00 | 59.00 | 0.000000 |
| num_sats | 99120.0 | 11.673497 | 5.709513 | 1.00 | 8.00 | 11.00 | 14.00 | 85.00 | 0.000000 |
| num_tl_120dpd_2m | 95661.0 | 0.001108 | 0.035695 | 0.00 | NaN | NaN | NaN | 4.00 | 0.034897 |
| num_tl_30dpd | 99120.0 | 0.004348 | 0.068650 | 0.00 | 0.00 | 0.00 | 0.00 | 3.00 | 0.000000 |
| num_tl_90g_dpd_24m | 99120.0 | 0.101332 | 0.567112 | 0.00 | 0.00 | 0.00 | 0.00 | 20.00 | 0.000000 |
| num_tl_op_past_12m | 99120.0 | 2.254752 | 1.960084 | 0.00 | 1.00 | 2.00 | 3.00 | 24.00 | 0.000000 |
| pct_tl_nvr_dlq | 99120.0 | 93.262828 | 9.696646 | 0.00 | 90.00 | 96.90 | 100.00 | 100.00 | 0.000000 |
| percent_bc_gt_75 | 98006.0 | 42.681332 | 36.296425 | 0.00 | NaN | NaN | NaN | 100.00 | 0.011239 |
| pub_rec_bankruptcies | 99120.0 | 0.150262 | 0.407706 | 0.00 | 0.00 | 0.00 | 0.00 | 8.00 | 0.000000 |
| tax_liens | 99120.0 | 0.075393 | 0.517275 | 0.00 | 0.00 | 0.00 | 0.00 | 61.00 | 0.000000 |
| tot_hi_cred_lim | 99120.0 | 172185.283394 | 175273.669652 | 2500.00 | 49130.75 | 108020.50 | 248473.25 | 3953111.00 | 0.000000 |
| total_bal_ex_mort | 99120.0 | 50818.694078 | 48976.640478 | 0.00 | 20913.00 | 37747.50 | 64216.25 | 1548128.00 | 0.000000 |
| total_bc_limit | 99120.0 | 20862.228420 | 20721.900664 | 0.00 | 7700.00 | 14700.00 | 27000.00 | 520500.00 | 0.000000 |
| total_il_high_credit_limit | 99120.0 | 44066.340375 | 44473.458730 | 0.00 | 15750.00 | 33183.00 | 58963.25 | 2000000.00 | 0.000000 |

74 rows × 9 columns

df.drop('annual_inc_joint',1,inplace=True)
df.drop('dti_joint',1,inplace=True)
df.select_dtypes(include=['int']).describe().T.\
assign(missing_pct=df.apply(lambda x : (len(x)-x.count())/float(len(x))))
| | count | mean | std | min | 25% | 50% | 75% | max | missing_pct |
|---|---|---|---|---|---|---|---|---|---|
| emp_length | 99120.0 | 5.757092 | 3.770359 | 0.0 | 2.0 | 6.0 | 10.0 | 10.0 | 0.0 |

### Target: Loan Statuses

df['loan_status'].value_counts()
# .plot(kind='bar')
Current               79445
Fully Paid            13066
Charged Off            2502
Late (31-120 days)     2245
In Grace Period        1407
Late (16-30 days)       454
Default                   1
Name: loan_status, dtype: int64
# Encode the target: 1 = good (Fully Paid, Current); 0 = bad (Late); ambiguous statuses -> NaN (dropped below)
df.loan_status.replace('Fully Paid', int(1),inplace=True)
df.loan_status.replace('Current', int(1),inplace=True)
df.loan_status.replace('Late (16-30 days)', int(0),inplace=True)
df.loan_status.replace('Late (31-120 days)', int(0),inplace=True)
df.loan_status.replace('Charged Off', np.nan,inplace=True)
df.loan_status.replace('In Grace Period', np.nan,inplace=True)
df.loan_status.replace('Default', np.nan,inplace=True)
# df.loan_status.astype('int')
df.loan_status.value_counts()
1.0    92511
0.0     2699
Name: loan_status, dtype: int64
# df.loan_status
df.dropna(subset=['loan_status'],inplace=True)

### Highly Correlated Data

cor = df.corr()
cor.loc[:,:] = np.tril(cor, k=-1) # keep only the strict lower triangle so each pair appears once
cor = cor.stack()
cor[(cor > 0.55) | (cor < -0.55)]
funded_amnt                     loan_amnt                      1.000000
funded_amnt_inv                 loan_amnt                      0.999994
                                funded_amnt                    0.999994
installment                     loan_amnt                      0.953380
                                funded_amnt                    0.953380
                                funded_amnt_inv                0.953293
mths_since_last_delinq          delinq_2yrs                   -0.551275
total_acc                       open_acc                       0.722950
mths_since_last_major_derog     mths_since_last_delinq         0.685642
open_il_24m                     open_il_12m                    0.760219
total_bal_il                    open_il_6m                     0.566551
open_rv_12m                     open_acc_6m                    0.623975
open_rv_24m                     open_rv_12m                    0.774954
max_bal_bc                      revol_bal                      0.551409
all_util                        il_util                        0.594925
total_rev_hi_lim                revol_bal                      0.815351
inq_last_12m                    inq_fi                         0.563011
acc_open_past_24mths            open_acc_6m                    0.553181
                                open_il_24m                    0.570853
                                open_rv_12m                    0.657606
                                open_rv_24m                    0.848964
avg_cur_bal                     tot_cur_bal                    0.828457
bc_open_to_buy                  total_rev_hi_lim               0.626380
bc_util                         all_util                       0.569469
mo_sin_rcnt_tl                  mo_sin_rcnt_rev_tl_op          0.606065
mort_acc                        tot_cur_bal                    0.551198
mths_since_recent_bc            mo_sin_rcnt_rev_tl_op          0.614262
mths_since_recent_bc_dlq        mths_since_last_delinq         0.751613
                                mths_since_last_major_derog    0.553022
mths_since_recent_revol_delinq  mths_since_last_delinq         0.853573
                                                                 ...   
num_sats                        total_acc                      0.720022
                                num_actv_bc_tl                 0.552957
                                num_actv_rev_tl                0.665429
                                num_bc_sats                    0.630778
                                num_op_rev_tl                  0.826946
                                num_rev_accts                  0.663595
                                num_rev_tl_bal_gt_0            0.668573
num_tl_30dpd                    acc_now_delinq                 0.801444
num_tl_90g_dpd_24m              delinq_2yrs                    0.669267
num_tl_op_past_12m              open_acc_6m                    0.722131
                                open_il_12m                    0.557902
                                open_rv_12m                    0.844841
                                open_rv_24m                    0.660265
                                acc_open_past_24mths           0.774867
pct_tl_nvr_dlq                  num_accts_ever_120_pd         -0.592502
percent_bc_gt_75                bc_util                        0.844108
pub_rec_bankruptcies            pub_rec                        0.580798
tax_liens                       pub_rec                        0.752084
tot_hi_cred_lim                 tot_cur_bal                    0.982693
                                avg_cur_bal                    0.795652
                                mort_acc                       0.560840
total_bal_ex_mort               total_bal_il                   0.902486
total_bc_limit                  max_bal_bc                     0.581536
                                total_rev_hi_lim               0.775151
                                bc_open_to_buy                 0.834159
                                num_bc_sats                    0.633461
total_il_high_credit_limit      open_il_6m                     0.552023
                                total_bal_il                   0.960349
                                num_il_tl                      0.583329
                                total_bal_ex_mort              0.889238
dtype: float64
df.drop(['funded_amnt','funded_amnt_inv', 'installment'], axis=1, inplace=True)
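funded_amnt, funded_amnt_inv, and installment are nearly collinear with loan_amnt, so they are dropped. A hedged sketch of how the same pruning could be automated for the remaining highly correlated pairs (the 0.95 cutoff is an arbitrary assumption; not applied here):

```python
# Sketch only: for every pair with correlation above a cutoff, flag the second member for removal.
corr_abs = df.corr().abs()
upper = corr_abs.where(np.triu(np.ones(corr_abs.shape, dtype=bool), k=1))   # strict upper triangle
to_drop = [col for col in upper.columns if (upper[col] > 0.95).any()]
print(to_drop)
# df.drop(to_drop, axis=1, inplace=True)   # uncomment to apply
```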

## 2. Our Model

from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn import ensemble
from sklearn.preprocessing import OneHotEncoder #https://ljalphabeta.gitbooks.io/python-/content/categorical_data.html
Y = df.loan_status
X = df.drop('loan_status',1,inplace=False)
print Y.shape
print sum(Y)
(95210,)
92511.0
X = pd.get_dummies(X)
print X.columns
print X.head(1).values
X.info()
Index([u'loan_amnt', u'int_rate', u'emp_length', u'annual_inc', u'dti',
       u'delinq_2yrs', u'inq_last_6mths', u'mths_since_last_delinq',
       u'mths_since_last_record', u'open_acc', u'pub_rec', u'revol_bal',
       u'total_acc', u'collections_12_mths_ex_med',
       u'mths_since_last_major_derog', u'acc_now_delinq', u'tot_coll_amt',
       u'tot_cur_bal', u'open_acc_6m', u'open_il_6m', u'open_il_12m',
       u'open_il_24m', u'mths_since_rcnt_il', u'total_bal_il', u'il_util',
       u'open_rv_12m', u'open_rv_24m', u'max_bal_bc', u'all_util',
       u'total_rev_hi_lim', u'inq_fi', u'total_cu_tl', u'inq_last_12m',
       u'acc_open_past_24mths', u'avg_cur_bal', u'bc_open_to_buy', u'bc_util',
       u'chargeoff_within_12_mths', u'delinq_amnt', u'mo_sin_old_il_acct',
       u'mo_sin_old_rev_tl_op', u'mo_sin_rcnt_rev_tl_op', u'mo_sin_rcnt_tl',
       u'mort_acc', u'mths_since_recent_bc', u'mths_since_recent_bc_dlq',
       u'mths_since_recent_inq', u'mths_since_recent_revol_delinq',
       u'num_accts_ever_120_pd', u'num_actv_bc_tl', u'num_actv_rev_tl',
       u'num_bc_sats', u'num_bc_tl', u'num_il_tl', u'num_op_rev_tl',
       u'num_rev_accts', u'num_rev_tl_bal_gt_0', u'num_sats',
       u'num_tl_120dpd_2m', u'num_tl_30dpd', u'num_tl_90g_dpd_24m',
       u'num_tl_op_past_12m', u'pct_tl_nvr_dlq', u'percent_bc_gt_75',
       u'pub_rec_bankruptcies', u'tax_liens', u'tot_hi_cred_lim',
       u'total_bal_ex_mort', u'total_bc_limit', u'total_il_high_credit_limit',
       u'home_ownership_ANY', u'home_ownership_MORTGAGE',
       u'home_ownership_OWN', u'home_ownership_RENT',
       u'verification_status_Not Verified',
       u'verification_status_Source Verified', u'verification_status_Verified',
       u'pymnt_plan_n', u'pymnt_plan_y', u'initial_list_status_f',
       u'initial_list_status_w', u'application_type_DIRECT_PAY',
       u'application_type_INDIVIDUAL', u'application_type_JOINT'],
      dtype='object')
[[  1.50000000e+04   1.39900000e+01   2.00000000e+00   5.50000000e+04
    2.37800000e+01   1.00000000e+00   0.00000000e+00   7.00000000e+00
               nan   2.20000000e+01   0.00000000e+00   2.13450000e+04
    4.30000000e+01   0.00000000e+00              nan   0.00000000e+00
    0.00000000e+00   1.40492000e+05   3.00000000e+00   1.00000000e+01
    2.00000000e+00   3.00000000e+00   1.10000000e+01   1.19147000e+05
    1.01000000e+02   3.00000000e+00   4.00000000e+00   1.46120000e+04
    8.30000000e+01   3.90000000e+04   1.00000000e+00   6.00000000e+00
    0.00000000e+00   7.00000000e+00   6.38600000e+03   9.64500000e+03
    7.31000000e+01   0.00000000e+00   0.00000000e+00   1.57000000e+02
    2.48000000e+02   4.00000000e+00   4.00000000e+00   0.00000000e+00
    4.00000000e+00   7.00000000e+00   2.20000000e+01   7.00000000e+00
    0.00000000e+00   5.00000000e+00   9.00000000e+00   6.00000000e+00
    7.00000000e+00   2.50000000e+01   1.10000000e+01   1.80000000e+01
    9.00000000e+00   2.20000000e+01   0.00000000e+00   0.00000000e+00
    0.00000000e+00   5.00000000e+00   1.00000000e+02   3.33000000e+01
    0.00000000e+00   0.00000000e+00   1.47587000e+05   1.40492000e+05
    3.02000000e+04   1.08587000e+05   0.00000000e+00   0.00000000e+00
    0.00000000e+00   1.00000000e+00   1.00000000e+00   0.00000000e+00
    0.00000000e+00   1.00000000e+00   0.00000000e+00   1.00000000e+00
    0.00000000e+00   0.00000000e+00   1.00000000e+00   0.00000000e+00]]
<class 'pandas.core.frame.DataFrame'>
Int64Index: 95210 entries, 0 to 99119
Data columns (total 84 columns):
loan_amnt                              95210 non-null float64
int_rate                               95210 non-null float64
emp_length                             95210 non-null int64
annual_inc                             95210 non-null float64
dti                                    95210 non-null float64
delinq_2yrs                            95210 non-null float64
inq_last_6mths                         95210 non-null float64
mths_since_last_delinq                 51229 non-null float64
mths_since_last_record                 18903 non-null float64
open_acc                               95210 non-null float64
pub_rec                                95210 non-null float64
revol_bal                              95210 non-null float64
total_acc                              95210 non-null float64
collections_12_mths_ex_med             95210 non-null float64
mths_since_last_major_derog            28125 non-null float64
acc_now_delinq                         95210 non-null float64
tot_coll_amt                           95210 non-null float64
tot_cur_bal                            95210 non-null float64
open_acc_6m                            95210 non-null float64
open_il_6m                             95210 non-null float64
open_il_12m                            95210 non-null float64
open_il_24m                            95210 non-null float64
mths_since_rcnt_il                     92660 non-null float64
total_bal_il                           95210 non-null float64
il_util                                82017 non-null float64
open_rv_12m                            95210 non-null float64
open_rv_24m                            95210 non-null float64
max_bal_bc                             95210 non-null float64
all_util                               95204 non-null float64
total_rev_hi_lim                       95210 non-null float64
inq_fi                                 95210 non-null float64
total_cu_tl                            95210 non-null float64
inq_last_12m                           95210 non-null float64
acc_open_past_24mths                   95210 non-null float64
avg_cur_bal                            95210 non-null float64
bc_open_to_buy                         94160 non-null float64
bc_util                                94126 non-null float64
chargeoff_within_12_mths               95210 non-null float64
delinq_amnt                            95210 non-null float64
mo_sin_old_il_acct                     92660 non-null float64
mo_sin_old_rev_tl_op                   95210 non-null float64
mo_sin_rcnt_rev_tl_op                  95210 non-null float64
mo_sin_rcnt_tl                         95210 non-null float64
mort_acc                               95210 non-null float64
mths_since_recent_bc                   94212 non-null float64
mths_since_recent_bc_dlq               24968 non-null float64
mths_since_recent_inq                  85581 non-null float64
mths_since_recent_revol_delinq         35158 non-null float64
num_accts_ever_120_pd                  95210 non-null float64
num_actv_bc_tl                         95210 non-null float64
num_actv_rev_tl                        95210 non-null float64
num_bc_sats                            95210 non-null float64
num_bc_tl                              95210 non-null float64
num_il_tl                              95210 non-null float64
num_op_rev_tl                          95210 non-null float64
num_rev_accts                          95210 non-null float64
num_rev_tl_bal_gt_0                    95210 non-null float64
num_sats                               95210 non-null float64
num_tl_120dpd_2m                       91951 non-null float64
num_tl_30dpd                           95210 non-null float64
num_tl_90g_dpd_24m                     95210 non-null float64
num_tl_op_past_12m                     95210 non-null float64
pct_tl_nvr_dlq                         95210 non-null float64
percent_bc_gt_75                       94156 non-null float64
pub_rec_bankruptcies                   95210 non-null float64
tax_liens                              95210 non-null float64
tot_hi_cred_lim                        95210 non-null float64
total_bal_ex_mort                      95210 non-null float64
total_bc_limit                         95210 non-null float64
total_il_high_credit_limit             95210 non-null float64
home_ownership_ANY                     95210 non-null float64
home_ownership_MORTGAGE                95210 non-null float64
home_ownership_OWN                     95210 non-null float64
home_ownership_RENT                    95210 non-null float64
verification_status_Not Verified       95210 non-null float64
verification_status_Source Verified    95210 non-null float64
verification_status_Verified           95210 non-null float64
pymnt_plan_n                           95210 non-null float64
pymnt_plan_y                           95210 non-null float64
initial_list_status_f                  95210 non-null float64
initial_list_status_w                  95210 non-null float64
application_type_DIRECT_PAY            95210 non-null float64
application_type_INDIVIDUAL            95210 non-null float64
application_type_JOINT                 95210 non-null float64
dtypes: float64(83), int64(1)
memory usage: 61.7 MB
X.fillna(0, inplace=True)   # replace all remaining NaNs with 0
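Filling every remaining NaN with 0 is the simplest choice, but for the mths_since_* / mo_sin_* columns a 0 means "zero months ago", i.e. very recent, which is the opposite of "no such event on record". A hedged alternative sketch (not applied above; the 999 sentinel is an arbitrary assumption):

```python
# Sketch only: fill "months since ..." columns with a large sentinel so that
# "no record" is not confused with "happened this month"; fill the rest with 0.
mths_cols = [c for c in X.columns if c.startswith('mths_since') or c.startswith('mo_sin')]
X[mths_cols] = X[mths_cols].fillna(999)
X.fillna(0, inplace=True)
```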

### Train Data & Test Data

x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=.3, random_state=123)
print(x_train.shape)
print(y_train.shape)
print(x_test.shape)
print(y_test.shape)
(66647, 84)
(66647,)
(28563, 84)
(28563,)
print y_train.value_counts()
print y_test.value_counts()
1.0    64712
0.0     1935
Name: loan_status, dtype: int64
1.0    27799
0.0      764
Name: loan_status, dtype: int64
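Only about 2.8% of the kept loans are bad (label 0), so the split is heavily imbalanced. A hedged sketch (not used above): passing stratify=Y to train_test_split keeps the bad rate identical in the train and test partitions.

```python
# Sketch only: a stratified split preserves the ~2.8% bad rate in both partitions.
x_train_s, x_test_s, y_train_s, y_test_s = train_test_split(
    X, Y, test_size=0.3, random_state=123, stratify=Y)
print(y_train_s.value_counts(normalize=True))
print(y_test_s.value_counts(normalize=True))
```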

### Gradient Boosting Regression Tree

# param_grid = {'learning_rate': [0.1, 0.05, 0.02, 0.01],
#               'max_depth': [1,2,3,4],
#               'min_samples_split': [50,100,200,400],
#               'n_estimators': [100,200,400,800]
#               }

param_grid = {'learning_rate': [0.1],
              'max_depth': [2],
              'min_samples_split': [50,100],
              'n_estimators': [100,200]
              }
# param_grid = {'learning_rate': [0.1],
#               'max_depth': [4],
#               'min_samples_leaf': [3],
#               'max_features': [1.0],
#               }

est = GridSearchCV(ensemble.GradientBoostingRegressor(),
                   param_grid, n_jobs=4, refit=True)

est.fit(x_train, y_train)

best_params = est.best_params_
print best_params
{'min_samples_split': 100, 'n_estimators': 100, 'learning_rate': 0.1, 'max_depth': 3}
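Because refit=True, the grid-search object already holds a model retrained on all of x_train; a minimal sketch of inspecting it (standard GridSearchCV attributes):

```python
print(est.best_score_)            # mean cross-validated score (R^2 for a regressor) of the best combination
print(est.best_estimator_)        # the GradientBoostingRegressor refitted on the full training set
print(est.score(x_test, y_test))  # score of that refitted estimator on the held-out test set
```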
%%time
est = ensemble.GradientBoostingRegressor(min_samples_split=50,n_estimators=300,
                                         learning_rate=0.1,max_depth=1, random_state=0,loss='ls').\
fit(x_train, y_train)
CPU times: user 24.2 s, sys: 251 ms, total: 24.4 s
Wall time: 25.6 s
est.score(x_test,y_test)
0.028311715416075908
%%time
est = ensemble.GradientBoostingRegressor(min_samples_split=50,n_estimators=100,
                                         learning_rate=0.1,max_depth=2, random_state=0,loss='ls').\
fit(x_train, y_train)
CPU times: user 20 s, sys: 272 ms, total: 20.3 s
Wall time: 21.6 s
est.score(x_test,y_test)
0.029210266192750467
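For a regressor, .score() reports R², which is hard to interpret here. Since the label is 0/1, the regressor's output can instead be treated as a ranking score; a minimal sketch using roc_auc_score:

```python
from sklearn.metrics import roc_auc_score

# Treat the regression output as a continuous score for the positive (good-loan) class.
y_score = est.predict(x_test)
print(roc_auc_score(y_test, y_score))
```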
def compute_ks(data):
    # KS statistic: the maximum gap between the cumulative distributions of bad (label 0)
    # and good (label 1) samples when the rows are ordered by the predicted score.
    sorted_list = data.sort_values(['predict'], ascending=[True])

    total_bad = (sorted_list['label'] == 0).sum()
    total_good = sorted_list.shape[0] - total_bad

    # print "total_bad = ", total_bad
    # print "total_good = ", total_good

    max_ks = 0.0
    good_count = 0.0
    bad_count = 0.0
    for index, row in sorted_list.iterrows():
        if row['label'] == 0:
            bad_count += 1.0
        else:
            good_count += 1.0

        val = bad_count/total_bad - good_count/total_good
        max_ks = max(max_ks, val)

    return max_ks
test_pd = pd.DataFrame()
test_pd['predict'] = est.predict(x_test)
test_pd['label'] = y_test.values   # .values avoids index misalignment with the fresh DataFrame
print compute_ks(test_pd[['label','predict']])
0.0
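As a cross-check on compute_ks, the same statistic can be obtained from the two score distributions with scipy; a minimal sketch:

```python
from scipy.stats import ks_2samp

# KS between the score distributions of bad (label 0) and good (label 1) test samples.
bad_scores = test_pd.loc[test_pd['label'] == 0, 'predict']
good_scores = test_pd.loc[test_pd['label'] == 1, 'predict']
ks_stat, p_value = ks_2samp(bad_scores, good_scores)
print(ks_stat)
```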
# Top Ten
feature_importance = est.feature_importances_
feature_importance = 100.0 * (feature_importance / feature_importance.max())

indices = np.argsort(feature_importance)[-10:]
plt.barh(np.arange(10), feature_importance[indices],color='dodgerblue',alpha=.4)
plt.yticks(np.arange(10) + 0.25, np.array(X.columns)[indices])
_ = plt.xlabel('Relative importance'), plt.title('Top Ten Important Variables')

[figure: Top Ten Important Variables, relative importance (gradient boosting)]

### Other Models

import xgboost as xgb
from sklearn.ensemble import ExtraTreesRegressor, RandomForestRegressor
# XGBoost
clf2 = xgb.XGBClassifier(n_estimators=50, max_depth=1, 
                            learning_rate=0.01, subsample=0.8, colsample_bytree=0.3,scale_pos_weight=3.0, 
                             silent=True, nthread=-1, seed=0, missing=None,objective='binary:logistic', 
                             reg_alpha=1, reg_lambda=1, 
                             gamma=0, min_child_weight=1, 
                             max_delta_step=0,base_score=0.5)

clf2.fit(x_train, y_train)
print clf2.score(x_test, y_test)
test_pd2 = pd.DataFrame()
test_pd2['predict'] = clf2.predict(x_test)
test_pd2['label'] = y_test.values
print compute_ks(test_pd2[['label','predict']])
print clf2.feature_importances_
# Top Ten
feature_importance = clf2.feature_importances_
feature_importance = 100.0 * (feature_importance / feature_importance.max())

indices = np.argsort(feature_importance)[-10:]
plt.barh(np.arange(10), feature_importance[indices],color='dodgerblue',alpha=.4)
plt.yticks(np.arange(10) + 0.25, np.array(X.columns)[indices])
_ = plt.xlabel('Relative importance'), plt.title('Top Ten Important Variables')
0.973252109372
0.0
[ 0.          0.30769232  0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.05128205
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.          0.          0.          0.          0.          0.          0.
  0.05128205  0.30769232  0.2820513   0.          0.          0.          0.
  0.        ]

[figure: Top Ten Important Variables, relative importance (XGBoost)]
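Note that clf2.predict returns hard 0/1 labels, which make a poor ranking score for KS. A hedged sketch using the class-1 probability instead (proba_pd is an illustrative name, not part of the original code):

```python
# Sketch only: rank by the predicted probability of class 1 rather than the hard label.
proba_pd = pd.DataFrame()
proba_pd['predict'] = clf2.predict_proba(x_test)[:, 1]   # P(label = 1)
proba_pd['label'] = y_test.values
print(compute_ks(proba_pd[['label', 'predict']]))
```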

# RFR
clf3 = RandomForestRegressor(n_jobs=-1, max_depth=10,random_state=0)
clf3.fit(x_train, y_train)
print clf3.score(x_test, y_test)
test_pd3 = pd.DataFrame()
test_pd3['predict'] = clf3.predict(x_test)
test_pd3['label'] = y_test.values
print compute_ks(test_pd3[['label','predict']])
print clf3.feature_importances_
# Top Ten
feature_importance = clf3.feature_importances_
feature_importance = 100.0 * (feature_importance / feature_importance.max())

indices = np.argsort(feature_importance)[-10:]
plt.barh(np.arange(10), feature_importance[indices],color='dodgerblue',alpha=.4)
plt.yticks(np.arange(10) + 0.25, np.array(X.columns)[indices])
_ = plt.xlabel('Relative importance'), plt.title('Top Ten Important Variables')
0.0148713087517
0.0
[ 0.02588781  0.10778862  0.00734994  0.02090219  0.02231172  0.00778016
  0.00556834  0.01097013  0.00734689  0.0017027   0.00622544  0.01140843
  0.00530896  0.00031185  0.01135318  0.          0.01488991  0.01840559
  0.00585621  0.00652523  0.0066759   0.00727607  0.00955013  0.01004672
  0.01785864  0.00855197  0.00985739  0.01477432  0.02184904  0.01816184
  0.00878854  0.02078236  0.01310288  0.00844302  0.01596395  0.01825196
  0.01817367  0.00297759  0.00084823  0.02808718  0.02917066  0.00897034
  0.01139324  0.01532409  0.01467681  0.0032855   0.01066291  0.00581661
  0.00955357  0.00417743  0.01333577  0.00489264  0.0128039   0.01340195
  0.01286394  0.01619219  0.00395603  0.00508973  0.          0.00234757
  0.00378329  0.00502684  0.01732834  0.01178674  0.00030035  0.01189509
  0.00942532  0.00841645  0.01571355  0.00288054  0.          0.0011667
  0.00106548  0.00488734  0.          0.00200132  0.00062765  0.04130873
  0.10076558  0.00022293  0.00165858  0.00308408  0.0008255   0.        ]

[figure: Top Ten Important Variables, relative importance (random forest)]

# XTR
clf4 = ExtraTreesRegressor(n_jobs=-1, max_depth=10,random_state=0)
clf4.fit(x_train, y_train)
print clf4.score(x_test, y_test)
test_pd4 = pd.DataFrame()
test_pd4['predict'] = clf4.predict(x_test)
test_pd4['label'] = y_test.values
print compute_ks(test_pd4[['label','predict']])
print clf4.feature_importances_
# Top Ten
feature_importance = clf4.feature_importances_
feature_importance = 100.0 * (feature_importance / feature_importance.max())

indices = np.argsort(feature_importance)[-10:]
plt.barh(np.arange(10), feature_importance[indices],color='dodgerblue',alpha=.4)
plt.yticks(np.arange(10) + 0.25, np.array(X.columns)[indices])
_ = plt.xlabel('Relative importance'), plt.title('Top Ten Important Variables')
0.020808034579
0.0
[ 0.00950112  0.17496689  0.00476969  0.00538677  0.00898343  0.01604885
  0.0139889   0.00605683  0.0042762   0.00358536  0.0144985   0.00915189
  0.00643305  0.00637134  0.0050764   0.00218012  0.00925068  0.00363339
  0.00988441  0.00645297  0.00662444  0.00934969  0.00739012  0.00635592
  0.00633908  0.00923972  0.01263829  0.01190224  0.00914159  0.00402144
  0.00917841  0.01456563  0.01161155  0.01097394  0.00506868  0.00772159
  0.00560163  0.01132941  0.00172528  0.0085601   0.01282485  0.00970629
  0.00956066  0.00731205  0.02087289  0.00430205  0.0062769   0.00765693
  0.00922104  0.00296456  0.00563208  0.00459181  0.0133819   0.00548208
  0.00450864  0.0132415   0.00677772  0.00509891  0.00108962  0.00578448
  0.00934323  0.00715127  0.01078137  0.00855071  0.00695096  0.01488993
  0.00317962  0.00485367  0.00476553  0.00509674  0.          0.00733654
  0.00097223  0.00380448  0.00534715  0.00356893  0.0128526   0.11944538
  0.11758343  0.00195945  0.00225379  0.00243429  0.0007562   0.        ]

[figure: Top Ten Important Variables, relative importance (extra trees)]

## Homework

1. feature-engineering

2. stacking

3. Plot ROC and KS curves and compare the models (a plotting sketch is given after the feature-engineering snippets below)

# Feature engineering method 1: histogram features
def get_histogram_features(full_dataset):
    # Bucket each row's values into order-of-magnitude bins and use the bin counts as new features.
    def extract_histogram(x):
        count, _ = np.histogram(x, bins=[0, 10, 100, 1000, 10000, 100000, 1000000, 9000000])
        return count
    column_names = ["hist_{}".format(i) for i in range(7)]   # 8 bin edges -> 7 bins
    hist = full_dataset.apply(lambda row: pd.Series(extract_histogram(row)), axis=1)
    hist.columns = column_names
    return hist
# Feature engineering method 2: row-wise quantile features
q = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
column_names = ["quantile_{}".format(i) for i in q]
# print pd.DataFrame(x_train)
quantile = pd.DataFrame(x_train).quantile(q=q, axis=1).T
quantile.columns = column_names
# Feature engineering method 3: row-wise cumulative sums
def get_cumsum_features(full_dataset, all_features):
    # Cumulative sum across the selected feature columns, one new column per step.
    column_names = ["cumsum_{}".format(i) for i in range(len(all_features))]
    cumsum = full_dataset[all_features].cumsum(axis=1)
    cumsum.columns = column_names
    return cumsum
# Feature engineering method 4: min-max feature scaling
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
x_train_normal = scaler.fit_transform(x_train)   # fit on train only; reuse scaler.transform on test
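For homework item 3, a minimal plotting sketch, assuming est is the gradient-boosting model fitted above and using its continuous predictions as the score (any other model's predicted probabilities could be substituted):

```python
from sklearn.metrics import roc_curve, auc

# Scores on the test set: higher means more likely to be a good loan (label 1).
scores = est.predict(x_test)
fpr, tpr, thresholds = roc_curve(y_test, scores)

plt.figure(figsize=(10, 4))

# ROC curve
plt.subplot(1, 2, 1)
plt.plot(fpr, tpr, label='AUC = {:.3f}'.format(auc(fpr, tpr)))
plt.plot([0, 1], [0, 1], linestyle='--', color='grey')
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.title('ROC curve')
plt.legend(loc='lower right')

# KS curve: TPR and FPR as functions of the score threshold;
# the KS statistic is the largest vertical gap between the two curves.
plt.subplot(1, 2, 2)
plt.plot(thresholds, tpr, label='cumulative good rate (TPR)')
plt.plot(thresholds, fpr, label='cumulative bad rate (FPR)')
ks = (tpr - fpr).max()
plt.xlabel('Score threshold')
plt.title('KS curve (KS = {:.3f})'.format(ks))
plt.legend(loc='best')
plt.tight_layout()
```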