sklearn Logistic Regression

GitHub repo: github.com/yangjinghit…

import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
from IPython.display import Image
Image(url='https://user-gold-cdn.xitu.io/2018/12/28/167f39089be54020?w=1280&h=720&f=jpeg&s=67044')
data = pd.read_csv("credit-a.csv", header=None)  # no header row: columns are auto-numbered 0-15
data.head()
   0      1      2  3  4  5  6     7  8  9  10  11  12   13     14  15
0  0  30.83  0.000  0  0  9  0  1.25  0  0   1   1   0  202    0.0  -1
1  1  58.67  4.460  0  0  8  1  3.04  0  0   6   1   0   43  560.0  -1
2  1  24.50  0.500  0  0  8  1  1.50  0  1   0   1   0  280  824.0  -1
3  0  27.83  1.540  0  0  9  0  3.75  0  0   5   0   0  100    3.0  -1
4  0  20.17  5.625  0  0  9  0  1.71  0  1   0   1   2  120    0.0  -1
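
The file appears to be a numerically encoded version of the UCI Credit Approval data: columns 0 through 14 are features and column 15 is the label, stored as 1/-1. Before modeling, a quick sanity check of the shape and class balance is worthwhile; a minimal sketch, assuming data was loaded as above:

# Quick look at the loaded frame (assumes data from the read_csv above)
print(data.shape)                 # (n_rows, 16): 15 feature columns + 1 label column
print(data[15].value_counts())    # label column, encoded as 1 / -1
print(data.isnull().sum().sum())  # check for missing values before fitting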
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression  # public import path; the private sklearn.linear_model.logistic module is removed in newer releases
x = data[data.columns[:-1]]  # all columns except the last are features
y = data[15].replace(-1, 0)  # relabel -1 as 0 so the classes are 0/1
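
Replacing -1 with 0 puts the labels in the usual 0/1 encoding, so the second column of predict_proba later reads directly as P(y=1). A quick check (sketch, same variables as above):

y.value_counts()   # should now contain only 0 and 1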
x_train, x_test, y_train, y_test = train_test_split(x, y)
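
With no extra arguments, train_test_split shuffles the rows and holds out 25% of them as the test set, so the exact numbers below will vary between runs. Passing random_state pins the split; a hedged variant (the values 0.25 and 42 are arbitrary choices, not from the original run):

x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.25, random_state=42, stratify=y)  # reproducible, class-balanced split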
model = LogisticRegression()
model.fit(x_train, y_train)
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
          intercept_scaling=1, max_iter=100, multi_class='ovr', n_jobs=1,
          penalty='l2', random_state=None, solver='liblinear', tol=0.0001,
          verbose=0, warm_start=False)
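The defaults printed above come from an older scikit-learn release (liblinear solver, multi_class='ovr'). Recent versions default to the lbfgs solver, which is more sensitive to feature scale and may warn about convergence on raw inputs like these; standardizing the features inside a pipeline is a common remedy. A sketch under that assumption, not part of the original run:

# Optional: scale features before the logistic regression (helps lbfgs converge)
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

pipe = make_pipeline(StandardScaler(), LogisticRegression(max_iter=1000))
pipe.fit(x_train, y_train)
pipe.score(x_test, y_test)
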
model.predict_proba(x_test)
array([[7.09635067e-01, 2.90364933e-01],
       [7.35430880e-02, 9.26456912e-01],
       [8.14498784e-01, 1.85501216e-01],
       [9.44070832e-01, 5.59291681e-02],
       [5.39802663e-01, 4.60197337e-01],
       [8.86407721e-01, 1.13592279e-01],
       [9.66748985e-01, 3.32510149e-02],
       [4.35188296e-02, 9.56481170e-01],
       [8.91381572e-01, 1.08618428e-01],
       [9.68843392e-02, 9.03115661e-01],
       [8.16079150e-01, 1.83920850e-01],
       [2.24947238e-02, 9.77505276e-01],
       [5.67223387e-02, 9.43277661e-01],
       [2.88795212e-02, 9.71120479e-01],
       [9.63950989e-01, 3.60490112e-02],
       [4.42696233e-01, 5.57303767e-01],
       [4.94643382e-02, 9.50535662e-01],
       [4.40378843e-01, 5.59621157e-01],
       [3.21203690e-01, 6.78796310e-01],
       [5.05669750e-01, 4.94330250e-01],
       [7.62207699e-01, 2.37792301e-01],
       [7.69630417e-01, 2.30369583e-01],
       [7.40170943e-01, 2.59829057e-01],
       [6.84076367e-01, 3.15923633e-01],
       [9.71696389e-01, 2.83036111e-02],
       [4.75150407e-01, 5.24849593e-01],
       [7.49336542e-01, 2.50663458e-01],
       [7.77502212e-02, 9.22249779e-01],
       [9.99546733e-01, 4.53266846e-04],
       [8.80210870e-02, 9.11978913e-01],
       [9.93781717e-01, 6.21828253e-03],
       [8.28208535e-01, 1.71791465e-01],
       [3.66660333e-01, 6.33339667e-01],
       [8.34433520e-01, 1.65566480e-01],
       [5.31220989e-02, 9.46877901e-01],
       [6.98269904e-01, 3.01730096e-01],
       [5.16818370e-02, 9.48318163e-01],
       [6.23260723e-01, 3.76739277e-01],
       [8.94264944e-01, 1.05735056e-01],
       [7.53824927e-02, 9.24617507e-01],
       [7.11242168e-01, 2.88757832e-01],
       [4.41655443e-02, 9.55834456e-01],
       [7.25713996e-01, 2.74286004e-01],
       [1.05314580e-01, 8.94685420e-01],
       [6.65925884e-02, 9.33407412e-01],
       [7.84854013e-01, 2.15145987e-01],
       [5.29850331e-02, 9.47014967e-01],
       [5.89222677e-02, 9.41077732e-01],
       [9.64085006e-01, 3.59149944e-02],
       [9.48479404e-01, 5.15205960e-02],
       [9.95397268e-01, 4.60273160e-03],
       [9.86867262e-01, 1.31327379e-02],
       [4.05964607e-02, 9.59403539e-01],
       [7.53411034e-01, 2.46588966e-01],
       [5.09641514e-02, 9.49035849e-01],
       [1.38703123e-01, 8.61296877e-01],
       [5.72736276e-02, 9.42726372e-01],
       [7.18435231e-01, 2.81564769e-01],
       [6.23633507e-02, 9.37636649e-01],
       [5.13962504e-02, 9.48603750e-01],
       [1.64337398e-01, 8.35662602e-01],
       [6.02571402e-01, 3.97428598e-01],
       [9.99374438e-01, 6.25561619e-04],
       [6.35027956e-02, 9.36497204e-01],
       [9.96923862e-01, 3.07613822e-03],
       [1.50439442e-01, 8.49560558e-01],
       [9.39336438e-01, 6.06635619e-02],
       [8.52983482e-01, 1.47016518e-01],
       [8.41830094e-01, 1.58169906e-01],
       [6.02282588e-01, 3.97717412e-01],
       [5.34016227e-02, 9.46598377e-01],
       [5.01540842e-02, 9.49845916e-01],
       [5.00755917e-02, 9.49924408e-01],
       [6.63001244e-01, 3.36998756e-01],
       [5.02642731e-02, 9.49735727e-01],
       [1.35365347e-01, 8.64634653e-01],
       [9.71265728e-01, 2.87342723e-02],
       [6.04860967e-01, 3.95139033e-01],
       [1.08038220e-01, 8.91961780e-01],
       [7.29442220e-02, 9.27055778e-01],
       [6.19500785e-01, 3.80499215e-01],
       [7.11932222e-01, 2.88067778e-01],
       [8.00871297e-01, 1.99128703e-01],
       [7.92137310e-02, 9.20786269e-01],
       [9.96999248e-01, 3.00075192e-03],
       [1.31409491e-01, 8.68590509e-01],
       [5.17531153e-01, 4.82468847e-01],
       [8.39131781e-01, 1.60868219e-01],
       [6.31963839e-02, 9.36803616e-01],
       [9.72311326e-01, 2.76886740e-02],
       [8.27220390e-01, 1.72779610e-01],
       [7.66088226e-01, 2.33911774e-01],
       [7.58753478e-01, 2.41246522e-01],
       [6.14186842e-01, 3.85813158e-01],
       [4.61259220e-01, 5.38740780e-01],
       [5.96981658e-01, 4.03018342e-01],
       [4.27309032e-02, 9.57269097e-01],
       [8.02175155e-01, 1.97824845e-01],
       [3.86314870e-02, 9.61368513e-01],
       [9.99999984e-01, 1.61604801e-08],
       [6.36683620e-01, 3.63316380e-01],
       [3.93827799e-02, 9.60617220e-01],
       [9.79642873e-01, 2.03571273e-02],
       [3.40823381e-02, 9.65917662e-01],
       [3.94216811e-01, 6.05783189e-01],
       [5.70783349e-03, 9.94292167e-01],
       [1.25931226e-01, 8.74068774e-01],
       [3.08716539e-02, 9.69128346e-01],
       [5.64805463e-02, 9.43519454e-01],
       [8.93636762e-01, 1.06363238e-01],
       [5.20442340e-01, 4.79557660e-01],
       [9.02466419e-02, 9.09753358e-01],
       [8.94304644e-01, 1.05695356e-01],
       [7.08698021e-02, 9.29130198e-01],
       [1.54573196e-01, 8.45426804e-01],
       [5.55802501e-02, 9.44419750e-01],
       [6.35218911e-02, 9.36478109e-01],
       [6.18616687e-02, 9.38138331e-01],
       [4.47491985e-02, 9.55250801e-01],
       [5.58359731e-01, 4.41640269e-01],
       [6.22534044e-01, 3.77465956e-01],
       [4.26119388e-02, 9.57388061e-01],
       [3.92041022e-02, 9.60795898e-01],
       [7.20033284e-02, 9.27996672e-01],
       [7.47817489e-02, 9.25218251e-01],
       [4.79225542e-02, 9.52077446e-01],
       [9.59996419e-01, 4.00035808e-02],
       [7.26786050e-02, 9.27321395e-01],
       [9.91691673e-01, 8.30832674e-03],
       [7.60256421e-01, 2.39743579e-01],
       [6.33335098e-02, 9.36666490e-01],
       [8.52013734e-01, 1.47986266e-01],
       [1.02505647e-01, 8.97494353e-01],
       [7.15514237e-02, 9.28448576e-01],
       [4.93411515e-01, 5.06588485e-01],
       [8.63997652e-01, 1.36002348e-01],
       [3.38087679e-02, 9.66191232e-01],
       [1.97885030e-01, 8.02114970e-01],
       [9.16692915e-01, 8.33070852e-02],
       [4.10532938e-02, 9.58946706e-01],
       [1.13508316e-01, 8.86491684e-01],
       [6.53752576e-01, 3.46247424e-01],
       [9.63307199e-01, 3.66928009e-02],
       [1.37315122e-01, 8.62684878e-01],
       [9.86218098e-01, 1.37819019e-02],
       [1.12941614e-01, 8.87058386e-01],
       [6.82427290e-01, 3.17572710e-01],
       [9.99999606e-01, 3.94129631e-07],
       [7.83289704e-02, 9.21671030e-01],
       [9.62252811e-01, 3.77471895e-02],
       [9.41035655e-01, 5.89643451e-02],
       [6.67697761e-02, 9.33230224e-01],
       [8.01515612e-01, 1.98484388e-01],
       [8.89771483e-01, 1.10228517e-01],
       [1.35633719e-01, 8.64366281e-01],
       [9.20740483e-01, 7.92595175e-02],
       [8.16368287e-01, 1.83631713e-01],
       [3.52583541e-01, 6.47416459e-01],
       [9.27254503e-02, 9.07274550e-01],
       [7.96691273e-02, 9.20330873e-01],
       [1.21543476e-01, 8.78456524e-01],
       [9.96561336e-01, 3.43866446e-03],
       [1.48538977e-01, 8.51461023e-01],
       [5.50211447e-02, 9.44978855e-01]])
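Each row of predict_proba sums to 1, and the columns follow model.classes_; with the 0/1 labels used here the first column is P(y=0) and the second is P(y=1). For a binary model, predict simply picks the more probable class, which is the same as thresholding the second column at 0.5. A small consistency check (sketch, using the np imported at the top):

proba = model.predict_proba(x_test)
print(model.classes_)                                 # [0 1]: column order of proba
manual = (proba[:, 1] > 0.5).astype(int)              # threshold P(y=1) at 0.5
print(np.array_equal(manual, model.predict(x_test)))  # expected: True
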
model.predict(x_test)
array([0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0,
       0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1,
       1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1,
       0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0,
       1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0,
       0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0,
       1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0,
       1, 0, 0, 1, 1, 1, 1, 0, 1, 1])
from sklearn.metrics import accuracy_score
accuracy_score(model.predict(x_test),y_test)
0.8536585365853658
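
accuracy_score expects (y_true, y_pred); accuracy is symmetric, so the swapped order above still returns the same value, but keeping the conventional order avoids surprises with asymmetric metrics. model.score computes the same accuracy directly, and classification_report adds per-class precision and recall. A sketch:

from sklearn.metrics import accuracy_score, classification_report

y_pred = model.predict(x_test)
print(accuracy_score(y_test, y_pred))           # same accuracy as above
print(model.score(x_test, y_test))              # built-in accuracy shortcut
print(classification_report(y_test, y_pred))    # per-class precision / recall / F1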