线性回归与逻辑回归实战

逻辑回归实战:逻辑回归实现鸢尾花数据的分类
注意知识点:在类别标签y中是具体的字符串,记得用sklearn自带的LabelEncoder处理标签,让其最后用0,1,2表示三个类
#!/usr/bin/python
# -*- coding:utf-8 -*-
"""Classify the iris dataset with logistic regression.

The raw class labels are species-name strings, so they are first mapped
to integer codes 0/1/2 with sklearn's LabelEncoder before training.
"""

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn import preprocessing
import pandas as pd
from sklearn.model_selection import train_test_split

path = '/Users/apple/Desktop/10.Regression/10.iris.data'
df = pd.read_csv(path, header=None)  # no header row in the raw iris file
x = df.iloc[:, :-1]   # all feature columns
y1 = df.iloc[:, -1]   # last column: species name as a string
# Encode string labels to integers 0, 1, 2.
le = preprocessing.LabelEncoder()
y = le.fit_transform(y1)
print(y)
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=1)
model = LogisticRegression()
model.fit(x_train, y_train)
y_hat = model.predict(x_test)             # hard class predictions
y_hat_prob = model.predict_proba(x_test)  # per-class probabilities
np.set_printoptions(suppress=True)        # print floats without scientific notation
# Mean of the element-wise comparison is the test-set accuracy (percent).
accuracy = 100 * np.mean(y_hat == y_test)
print(y_hat)
print('======')
print(y_hat_prob)
print("=========")
print(u'准确率:%f' % accuracy)
 
线性回归实战:对销量的预测
注意事项:通过三个图片我们知道newspaper对销售的影响不是呈线性关系的,因而我们在模型中不使用此特征。
#!/usr/bin/python
# -*- coding:utf-8 -*-
"""Predict product sales from advertising spend with linear regression.

Scatter plots of each channel against sales show that Newspaper spend has
no clear linear relationship with sales, so only TV and Radio are used as
features for the model.
"""

import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import matplotlib as mlp
import matplotlib.pyplot as plt

path = '/Users/apple/Desktop/10.Regression/10.Advertising.csv'
data = pd.read_csv(path)
x = data[['TV', 'Radio']]  # Newspaper deliberately excluded (non-linear, see plots)
y = data['Sales']

# Visual check: each advertising channel vs. sales.
plt.figure()
plt.subplot(311)
plt.plot(data['TV'], y, 'ro')
plt.title('TV')
plt.subplot(312)
plt.plot(data['Radio'], y, 'go')
plt.title('Radio')
plt.subplot(313)
plt.plot(data['Newspaper'], y, 'bo')
plt.title('Newspaper')
plt.show()

x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.7, random_state=1)
lr = LinearRegression()
model = lr.fit(x_train, y_train)
print(model)
print(lr.coef_)       # one coefficient per feature (TV, Radio)
print(lr.intercept_)

y_hat = lr.predict(np.array(x_test))
mse = np.average((y_hat - np.array(y_test)) ** 2)  # Mean Squared Error
rmse = np.sqrt(mse)                                # Root Mean Squared Error
print(mse, rmse)

# Compare predictions against the true test targets sample by sample.
t = np.arange(len(x_test))
plt.plot(t, y_test, 'r-', linewidth=2, label='true data')
plt.plot(t, y_hat, 'g-', linewidth=2, label='predict data')
plt.legend(loc='upper right')
plt.title('LinearRegression predict sale', fontsize=18)
plt.grid()
plt.show()
lasso回归:销售量预测
注意:lasso回归会自动提取特征,但是没有Ridge回归的效果好,因而我们实践中常常用其两者的折中,即Elastic Net。
     在代码中寻找最优参数是通过GridSearchCV实现的。
#!/usr/bin/python
# -*- coding:utf-8 -*-
"""Predict sales with Lasso regression, tuning alpha via GridSearchCV.

Lasso performs implicit feature selection by shrinking some coefficients
to zero; swap in Ridge (commented out below) to compare. The optimal
regularization strength is found with a 6-fold cross-validated grid
search over a log-spaced range of alpha values.
"""

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Lasso, Ridge
from sklearn.model_selection import GridSearchCV


if __name__ == "__main__":
    # pandas读入 — columns: TV, Radio, Newspaper, Sales
    data = pd.read_csv('10.Advertising.csv')
    x = data[['TV', 'Radio', 'Newspaper']]
    # x = data[['TV', 'Radio']]
    y = data['Sales']
    print(x)
    print(y)

    x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=1)
    model = Lasso()
    # model = Ridge()
    # 10 candidate alphas, log-spaced from 1e-3 to 1e2.
    alpha_can = np.logspace(-3, 2, 10)
    lasso_model = GridSearchCV(model, param_grid={'alpha': alpha_can}, cv=6)
    lasso_model.fit(x_train, y_train)
    print('超参数:\n', lasso_model.best_params_)

    y_hat = lasso_model.predict(np.array(x_test))
    print(lasso_model.score(x_test, y_test))  # R^2 on the held-out test set
    mse = np.average((y_hat - np.array(y_test)) ** 2)  # Mean Squared Error
    rmse = np.sqrt(mse)  # Root Mean Squared Error
    print(mse, rmse)

    # Plot true vs. predicted sales over the test samples.
    t = np.arange(len(x_test))
    mpl.rcParams['font.sans-serif'] = [u'simHei']       # CJK-capable font for labels
    mpl.rcParams['axes.unicode_minus'] = False          # render minus sign correctly
    plt.plot(t, y_test, 'r-', linewidth=2, label=u'真实数据')
    plt.plot(t, y_hat, 'g-', linewidth=2, label=u'预测数据')
    plt.title(u'线性回归预测销量', fontsize=18)
    plt.legend(loc='upper right')
    plt.grid()
    plt.show()

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值