#-*- coding:utf-8 -*-
import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
# Simple (univariate) linear regression: fit noisy samples of y = 5x + 6
# and plot the fitted line against the scattered data.
samples = np.linspace(0, 10, 50)
jitter = np.random.uniform(-1, 2, size=50)  # uniform noise in [-1, 2)
targets = 5 * samples + 6 + jitter
model = LinearRegression()
# sklearn expects 2-D inputs: one column per feature.
model.fit(samples.reshape(-1, 1), targets.reshape(-1, 1))
fitted = model.predict(samples.reshape(-1, 1))
plt.figure(figsize=(5, 5))
plt.scatter(samples, targets)
plt.plot(samples, fitted, color="r")
plt.show()
# Recovered slope/intercept should be close to 5 and 6.
print(model.coef_)
print(model.intercept_)
# Multiple (two-feature) linear regression: only the input dimensionality
# changes compared with the univariate case above.
X = [[147, 9], [129, 7], [141, 9], [145, 11], [142, 11], [151, 13]]
y = [[34], [23], [25], [47], [26], [46]]
liner = LinearRegression()
liner.fit(X, y)
x_test = [[149, 11], [152, 12], [140, 8], [138, 10], [132, 7], [147, 10]]
y_test = [[41], [37], [28], [27], [21], [38]]
predictions = liner.predict(x_test)
plt.plot(y_test, label='y_test')
plt.plot(predictions, label='predictions')
# Bug fix: the original wrote `plt.legend` (no call — a bare function
# reference, a no-op) and placed it after plt.show(); legend() must be
# *called*, and before show(), for the legend to appear on the figure.
plt.legend()
plt.show()
print(liner.coef_)
print(liner.intercept_)
# Polynomial regression: degree=2 expands each two-feature row into all
# quadratic terms (1, a, b, a^2, a*b, b^2); a linear model on the expanded
# features then fits a degree-2 polynomial.
from sklearn.preprocessing import PolynomialFeatures
quadratic_featurizer = PolynomialFeatures(degree=2)
# Bug fix: the original referenced undefined names X_train / X_test
# (NameError). Reuse the training data X and test data x_test defined above.
X_train_quadratic = quadratic_featurizer.fit_transform(X)
X_test_quadratic = quadratic_featurizer.transform(x_test)
# Logistic regression on the iris data set.
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

iris = load_iris()
# Use only the first two feature columns so the problem stays 2-D.
X, Y = iris.data[:, :2], iris.target
# Build the model: a very large C means almost no regularisation.
lr = LogisticRegression(C=1e5, solver='liblinear')
lr.fit(X, Y)
# NOTE(review): the two lines below were stray web-page text captured when
# this snippet was scraped ("Linear" plus a blog publish-date footer); left
# here as comments so the module remains syntactically valid.
# Linear
# 最新推荐文章于 2023-12-08 15:28:14 发布