import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
from sklearn.linear_model import LinearRegression
# Design matrix for a quadratic model: 50 evenly spaced samples on [0, 10],
# arranged as two feature columns [x^2, x].
samples = np.linspace(0, 10, 50)
X = np.column_stack([samples ** 2, samples])
X.shape
![output of X.shape](https://i-blog.csdnimg.cn/blog_migrate/c7967f8465a8f81f8c5d004f802138af.png)
# Sample random integer ground-truth parameters, then generate targets
# y[i] = w[0]*x_i^2 + w[1]*x_i + b from the [x^2, x] feature matrix X.
w = np.random.randint(1, 10, size=2)
b = np.random.randint(-5, 5, size=1)
y = X @ w + b
y.shape
![output of y.shape](https://i-blog.csdnimg.cn/blog_migrate/412e5ee71c52896cc2db421a0c2b3f20.png)
# Plot the generated quadratic: x-values are the linear column X[:, 1].
plt.plot(X[:, 1], y, c='r')
![plot of the generated quadratic curve](https://i-blog.csdnimg.cn/blog_migrate/017e6892716e405800af18b7046cb787.png)
# Same curve, with the sampled ground-truth parameters shown in the title.
title = 'w1:%d.w2:%d.b:%d' % (w[0], w[1], b[0])
plt.plot(X[:, 1], y, color='r')
plt.title(title)
![quadratic curve with ground-truth parameters in the title](https://i-blog.csdnimg.cn/blog_migrate/4b58fdc574c08795a63545fbf2083937.png)
# Closed-form least-squares fit on the [x^2, x] features; the recovered
# coefficients and intercept should match w and b sampled above.
lr = LinearRegression().fit(X, y)  # fit() returns the estimator itself
print(lr.coef_, lr.intercept_)
![printed sklearn coefficients and intercept](https://i-blog.csdnimg.cn/blog_migrate/e272e905647a61cac51561a3af4e5315.png)
# Overlay the data (red line + star markers) with a hard-coded green parabola
# 2x^2 + 6x - 4 — presumably eyeballed from one run's fitted parameters.
plt.plot(X[:, 1], y, color='r')
plt.title('w1:%d.w2:%d.b:%d' % (w[0], w[1], b[0]))
plt.scatter(X[:, 1], y, marker='*')
xs = np.linspace(-2, 12, 100)
guess = lambda t: 2 * t ** 2 + 6 * t - 4
plt.plot(xs, guess(xs), color='green')
![data overlaid with the green fitted parabola](https://i-blog.csdnimg.cn/blog_migrate/e99244ed556ab46da4d673b2bde41264.png)
def gradient_descent(X, y, lr, epoch, w, b):
    """Fit y ~ X.w + b by full-batch gradient descent on squared error.

    The original listing lost all indentation; this reconstructs the
    intended structure from the statement order.

    Parameters
    ----------
    X : 2-D indexable, shape (batch, n_features)
        Feature rows; X[j][n] is feature n of sample j.
    y : 1-D indexable, length batch
        Target values.
    lr : list of float
        Per-coordinate learning rates: lr[n] scales the step for w[n],
        and lr[0] is reused as the step size for the intercept b.
    epoch : int
        Number of full passes over the data.
    w : mutable sequence of float, length n_features
        Initial weights; NOTE: updated in place (and also returned).
    b : float
        Initial intercept.

    Returns
    -------
    (w, b) : the parameters after `epoch` updates.
    """
    batch = len(X)
    n_features = len(w)
    for _ in range(epoch):
        # Batch-averaged gradient accumulators for this epoch.
        dw = [0.0] * n_features
        db = 0.0
        for j in range(batch):
            # Prediction for sample j: y_ = sum_n X[j][n]*w[n] + b
            y_ = b
            for n in range(n_features):
                y_ += X[j][n] * w[n]
            # d(0.5 * (y - y_)^2) / d(y_) = -(y - y_)
            residual = -(y[j] - y_)
            for n in range(n_features):
                dw[n] += X[j][n] * residual / float(batch)
            db += residual / float(batch)
        # Per-coordinate step sizes; the intercept shares lr[0].
        for n in range(n_features):
            w[n] -= dw[n] * lr[n]
        b -= db * lr[0]
    return w, b
# Recover the quadratic's parameters with the hand-rolled gradient descent.
lr = [0.0001, 0.001]  # per-weight step sizes; lr[0] also drives the intercept
epochs = 500
w, b = np.random.randn(2), np.random.randn(1)[0]
w_, b_ = gradient_descent(X, y, lr, epochs, w, b)
print(w_, b_)
![printed gradient-descent parameter estimates](https://i-blog.csdnimg.cn/blog_migrate/42933f2b622143aec6df2933f1c2ccb5.png)
# Plot the data against the curve defined by the gradient-descent estimates.
plt.scatter(X[:, 1], y, marker='*')
grid = np.linspace(-2, 12, 100)
fitted = lambda t: w_[0] * t ** 2 + w_[1] * t + b_
plt.plot(grid, fitted(grid), color='g')
![scatter of the data with the gradient-descent fit curve](https://i-blog.csdnimg.cn/blog_migrate/adbe6e86a5b9f6222d8bb2e7fd5448fa.png)