Keras Regressor example (summarized from 莫烦Python)

"""
To know more or get code samples, please visit my website:
https://morvanzhou.github.io/tutorials/
Or search: 莫烦Python
Thank you for supporting!
"""

# please note, all tutorial code runs under Python 3.5.
# If you use a version like Python 2.7, please modify the code accordingly

# 4 - Regressor example

import numpy as np
np.random.seed(1337)  # for reproducibility
from keras.models import Sequential
from keras.layers import Dense
import matplotlib.pyplot as plt

# create some data
X = np.linspace(-1, 1, 200)
np.random.shuffle(X)    # randomize the data
Y = 0.5 * X + 2 + np.random.normal(0, 0.05, (200, ))
# plot data
plt.scatter(X, Y)
plt.show()

X_train, Y_train = X[:160], Y[:160]     # first 160 data points
X_test, Y_test = X[160:], Y[160:]       # last 40 data points

# build a neural network from the 1st layer to the last layer
model = Sequential()
model.add(Dense(output_dim=1, input_dim=1))

# choose loss function and optimizing method
model.compile(loss='mse', optimizer='sgd')

# training
print('Training -----------')
for step in range(301):
    cost = model.train_on_batch(X_train, Y_train)
    if step % 100 == 0:
        print('train cost: ', cost)

# test
print('\nTesting ------------')
cost = model.evaluate(X_test, Y_test, batch_size=40)
print('test cost:', cost)
W, b = model.layers[0].get_weights()
print('Weights=', W, '\nbiases=', b)

# plotting the prediction
Y_pred = model.predict(X_test)
plt.scatter(X_test, Y_test)
plt.plot(X_test, Y_pred)
plt.show()

# Sequential: a model built as a linear stack of layers

from keras.models import Sequential

# Dense: a fully connected layer

from keras.layers import Dense

# visualization module

import matplotlib.pyplot as plt

# build a neural network from the 1st layer to the last layer

model = Sequential()

# add a layer; if there are several layers, the input of the second layer defaults to
# the output of the first, so only the first layer needs input_dim
# (see the two-layer sketch just below)

model.add(Dense(output_dim=1, input_dim=1))
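
# If you wanted a deeper network, only the first layer needs input_dim; later layers
# infer their input size from the previous layer's output. A minimal sketch (the hidden
# size of 10 and the extra layer are illustrative, not part of the original example):

deep_model = Sequential()
deep_model.add(Dense(output_dim=10, input_dim=1))  # first layer: input_dim required
deep_model.add(Dense(output_dim=1))                # later layer: input size inferred
deep_model.compile(loss='mse', optimizer='sgd')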

# choose loss function and optimizing method

# loss is the error function. 'mse' is the mean squared error (MSE): the average of the
# squared differences between predictions and targets, a convenient measure of the
# average error; the root mean squared error (RMSE) is its square root.

# optimizer is the optimization method. 'sgd' is stochastic gradient descent.

model.compile(loss='mse', optimizer='sgd')
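
# To make the 'mse' number concrete, here is a small NumPy sketch that computes the same
# quantity by hand (the toy arrays y_true and y_pred below are made up for illustration):

y_true = np.array([2.0, 2.1, 1.9])     # hypothetical target values
y_pred = np.array([2.2, 2.0, 1.8])     # hypothetical predictions
mse = np.mean((y_true - y_pred) ** 2)  # mean squared error
rmse = np.sqrt(mse)                    # root mean squared error
print('MSE:', mse, 'RMSE:', rmse)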

# train

print('Training ------')

for step in range(301):
    # here every batch is the full training set; this is just for demonstration
    # (a mini-batch variant is sketched after this loop)
    cost = model.train_on_batch(X_train, Y_train)
    if step % 100 == 0:
        print('train cost:', cost)
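
# If you wanted real mini-batches instead of feeding the whole training set every step,
# one way (a sketch; the batch size of 32 and epoch count are illustrative choices) is to
# slice X_train and Y_train before calling train_on_batch:

batch_size = 32
for epoch in range(100):
    for start in range(0, len(X_train), batch_size):
        end = start + batch_size
        cost = model.train_on_batch(X_train[start:end], Y_train[start:end])
    if epoch % 20 == 0:
        print('epoch', epoch, 'cost:', cost)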

# test

print('\nTesting ------')

cost = model.evaluate(X_test, Y_test, batch_size=40)

print('test cost:', cost)

# get the learned weights and bias; the model is essentially y = W*x + b

W, b = model.layers[0].get_weights()

print('Weights=', W, '\nbiases=', b)
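
# Since the network is a single Dense layer, its prediction is just the linear map
# y = W*x + b. A quick check (manual_pred is an illustrative name) that this matches
# model.predict:

manual_pred = X_test * W[0][0] + b[0]            # W has shape (1, 1), b has shape (1,)
keras_pred = model.predict(X_test).flatten()
print('max difference:', np.abs(manual_pred - keras_pred).max())  # should be ~0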


