# [3]深度学习和Keras----Keras深度学习框架入门例子

# Import package
import numpy as np
np.random.seed(1337)
from keras.models import Sequential
from keras.layers import Dense
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

# 1. Build the training data: 200 evenly spaced points on [-1, 1],
# shuffled so the train/test split below is a random partition.
X = np.linspace(-1, 1, 200)
np.random.shuffle(X)
# Target is a noisy line: slope 0.5, intercept 2, Gaussian noise (sigma=0.05).
# (A stray, result-discarding np.random.normal() call was removed here.)
Y = 0.5 * X + 2 + np.random.normal(0, 0.05, (200,))
plt.scatter(X, Y)
plt.show()  # no-op under the Agg backend; kept for interactive runs
# First 160 samples for training, the remaining 40 for testing.
X_train, Y_train = X[:160], Y[:160]
X_test, Y_test = X[160:], Y[160:]
print(X)
print("*******************************************")
print(Y)

# 2. Build a neural network from the 1st layer to the last layer.
# A single Dense layer (1 input, 1 output) fits y = w*x + b.
# NOTE: this layer was missing from the pasted snippet; the transcript
# warning "Dense(units=1, input_dim=1)" shows it was in the original script,
# and model.layers[0].get_weights() below requires it.
model = Sequential()
model.add(Dense(units=1, input_dim=1))

# 3. Choose loss function and optimizing method:
# mean squared error minimized by stochastic gradient descent.
model.compile(loss='mse', optimizer='sgd')

# 4. Training: 1400 mini-batch updates on the full training set,
# logging the loss every 100 steps. (Indentation of the loop body was
# lost in the paste and is restored here.)
print("Training......")
for step in range(1400):
    cost = model.train_on_batch(X_train, Y_train)
    if step % 100 == 0:
        print('train cost', cost)
# 5. Evaluate on the 40 held-out samples and inspect the fitted
# parameters; the weight should be near 0.5 and the bias near 2.
print("\n Testing...........")
test_cost = model.evaluate(X_test, Y_test, batch_size=40)
print("Test cost:", test_cost)
weights, biases = model.layers[0].get_weights()
print('Weight=', weights, "\nbiases=", biases)

# 6. Plot the model's predicted line over the raw test points.
predictions = model.predict(X_test)
plt.scatter(X_test, Y_test)
plt.plot(X_test, predictions)
plt.show()

ubuntu@keras:~/keraslearn$ python3 helloworld.py
Using TensorFlow backend.
[-0.70854271  0.1758794  -0.30653266  0.74874372 -0.02512563  0.33668342
-0.85929648  0.01507538 -0.13567839  0.72864322  0.24623116 -0.74874372
-0.78894472  0.50753769  0.03517588  0.35678392 -0.55778894  0.2361809
-0.25628141 -0.44723618  0.2160804  -0.43718593 -0.64824121  0.69849246
-0.03517588 -0.45728643  0.86934673  0.73869347  0.53768844 -0.67839196
-0.75879397  0.55778894  0.28643216 -0.05527638 -0.86934673  0.1959799
-0.57788945 -0.9798995  -0.6080402  -0.63819095  0.84924623  0.41708543
0.13567839  0.79899497 -0.47738693  0.46733668  0.59798995 -0.80904523
-0.98994975 -0.36683417 -0.5678392  -0.00502513 -0.53768844 -0.37688442
-0.65829146 -0.1959799   0.06532663  0.44723618 -0.01507538 -0.6281407
0.02512563 -0.71859296 -0.14572864 -0.46733668  0.07537688  0.85929648
0.76884422  0.40703518 -0.68844221  0.68844221 -0.29648241  0.66834171
-0.95979899 -0.33668342  0.26633166 -0.82914573  1.         -0.5879397
-0.69849246 -0.20603015  0.63819095 -0.88944724 -0.40703518 -0.32663317
0.15577889 -0.41708543  0.10552764  0.20603015 -0.04522613  0.00502513
-0.31658291  0.43718593  0.42713568  0.45728643 -0.59798995 -0.66834171
0.83919598  0.75879397 -0.24623116  0.71859296 -0.92964824  0.39698492
0.61809045 -0.84924623 -0.87939698 -0.96984925  0.87939698  0.6281407
0.25628141  0.27638191  0.12562814  0.09547739 -0.89949749  0.80904523
-0.16582915 -0.12562814  0.30653266  0.49748744  0.5879397  -0.51758794
-0.10552764  0.54773869 -0.94974874  0.92964824  0.16582915 -0.83919598
-0.35678392 -0.48743719  0.08542714 -0.61809045  0.18592965  0.57788945
0.65829146  0.38693467  0.91959799 -0.26633166 -0.50753769 -1.
-0.54773869  0.6080402  -0.49748744 -0.22613065  0.9798995   0.98994975
0.5678392   0.32663317  0.64824121 -0.52763819  0.36683417  0.81909548
-0.11557789  0.31658291 -0.2160804   0.95979899  0.77889447 -0.73869347
-0.81909548 -0.79899497  0.78894472  0.88944724 -0.2361809   0.37688442
0.70854271  0.22613065 -0.28643216 -0.38693467  0.90954774 -0.91959799
0.48743719 -0.42713568 -0.08542714  0.11557789 -0.18592965  0.47738693
-0.39698492 -0.34673367  0.04522613  0.05527638  0.93969849 -0.77889447
-0.93969849 -0.06532663 -0.72864322  0.29648241  0.52763819 -0.76884422
0.94974874  0.82914573  0.34673367 -0.90954774 -0.27638191 -0.15577889
-0.1758794   0.14572864 -0.09547739  0.96984925  0.67839196 -0.07537688
0.89949749  0.51758794]
*******************************************
[ 1.67999883  2.15732476  1.86215827  2.35166736  1.93572236  2.18016038
1.61608065  2.03423062  1.99563479  2.37665292  2.18481192  1.68394294
1.62204409  2.26767077  1.98888272  2.22171139  1.71104094  2.17892658
1.91484027  1.82024419  2.16604711  1.81951961  1.54483872  2.34601517
2.00887174  1.77313928  2.37743864  2.31814604  2.32716849  1.68305339
1.60808704  2.24648868  2.26423579  1.96845047  1.66389213  2.1151824
1.76544914  1.5408011   1.68818555  1.65445629  2.34232127  2.21861488
2.07349814  2.44520484  1.68813908  2.23841182  2.28008276  1.61527365
1.45239671  1.86837714  1.73063808  2.01510789  1.78710972  1.81579434
1.7282564   1.88204563  2.12479282  2.27787801  2.04415445  1.64586279
2.02443768  1.69792605  1.90274178  1.78137239  2.02497528  2.42132526
2.42777448  2.16564271  1.68763444  2.31923599  1.81271447  2.34163208
1.48248635  1.78163246  2.0686663   1.56576829  2.57615655  1.65904659
1.57722512  1.81629077  2.26556137  1.61007302  1.88352331  1.71313059
2.10104146  1.7038275   1.97249884  2.10127479  2.07381404  2.11472407
1.83211308  2.26576731  2.23235442  2.28700014  1.72663184  1.71425067
2.44958621  2.48620331  1.89267965  2.35438093  1.57974268  2.22254286
2.23280715  1.50250543  1.58598131  1.40667751  2.50025819  2.37980426
2.19965578  2.12256837  2.05612666  1.98159549  1.49287023  2.43805289
1.87463195  2.04988238  2.12882858  2.2546237   2.34223092  1.65392499
1.99025412  2.28291716  1.45807616  2.51044748  1.97988068  1.66059626
1.7961923   1.75129171  2.0241939   1.69914728  2.07346622  2.33384759
2.35610171  2.22847144  2.39450148  1.84129314  1.69452659  1.55065089
1.66503047  2.28833885  1.65681898  1.9227888   2.46308462  2.55316759
2.3420487   2.1586477   2.31608103  1.72595194  2.20947867  2.43358179
1.92711345  2.09783848  1.90011119  2.3742013   2.39525115  1.61921788
1.60036827  1.61487721  2.32314506  2.47010224  1.78412147  2.20253779
2.29520865  2.13392445  1.88572837  1.80240843  2.37782058  1.53947432
2.21682792  1.80241496  1.9832941   2.19691473  1.98446141  2.25189264
1.79373247  1.91751681  2.04975192  1.95401037  2.47210723  1.64954777
1.53229024  1.89811048  1.56746323  2.10295781  2.18903255  1.5645517
2.47228584  2.29823263  2.17956294  1.51738366  1.86618515  1.92440005
1.77639738  2.04854509  1.89432423  2.44443885  2.40954465  1.93669521
2.4738211   2.26099266]
helloworld.py:25: UserWarning: Update your Dense call to the Keras 2 API: Dense(units=1, input_dim=1)
Training......
2017-05-30 13:52:34.569118: W tensorflow/core/platform/cpu_feature_guard.cc:45] The TensorFlow library wasn't compiled to use SSE4.1 instructions, but these are available on your machine and could speed up CPU computations.
2017-05-30 13:52:34.569158: W tensorflow/core/platform/cpu_feature_guard.cc:45] The TensorFlow library wasn't compiled to use SSE4.2 instructions, but these are available on your machine and could speed up CPU computations.
2017-05-30 13:52:34.569165: W tensorflow/core/platform/cpu_feature_guard.cc:45] The TensorFlow library wasn't compiled to use AVX instructions, but these are available on your machine and could speed up CPU computations.
2017-05-30 13:52:34.569169: W tensorflow/core/platform/cpu_feature_guard.cc:45] The TensorFlow library wasn't compiled to use AVX2 instructions, but these are available on your machine and could speed up CPU computations.
2017-05-30 13:52:34.569176: W tensorflow/core/platform/cpu_feature_guard.cc:45] The TensorFlow library wasn't compiled to use FMA instructions, but these are available on your machine and could speed up CPU computations.
train cost 4.02812
train cost 0.0807406
train cost 0.00588549
train cost 0.00316872
train cost 0.00276465
train cost 0.00266697
train cost 0.00264223
train cost 0.00263594
train cost 0.00263434
train cost 0.00263394
train cost 0.00263383
train cost 0.00263381
train cost 0.0026338
train cost 0.0026338

Testing...........
40/40 [==============================] - 0s
Test cost: 0.00337669742294
Weight= [[ 0.5063296]]
biases= [ 2.00427961]

### @ 导入相关的Python和Keras的模块（module）

import numpy as np
np.random.seed(1337)
from keras.models import Sequential
from keras.layers import Dense
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

import matplotlib
matplotlib.use('Agg')

### @ 随机生成200个数字并模拟一个线性函数

# 1. Build the training data: 200 evenly spaced points on [-1, 1],
# shuffled so the train/test split below is a random partition.
X = np.linspace(-1, 1, 200)
np.random.shuffle(X)
# Target is a noisy line: slope 0.5, intercept 2, Gaussian noise (sigma=0.05).
# (A stray, result-discarding np.random.normal() call was removed here.)
Y = 0.5 * X + 2 + np.random.normal(0, 0.05, (200,))
plt.scatter(X, Y)
plt.show()  # no-op under the Agg backend; kept for interactive runs
# First 160 samples for training, the remaining 40 for testing.
X_train, Y_train = X[:160], Y[:160]
X_test, Y_test = X[160:], Y[160:]
print(X)
print("*******************************************")
print(Y)

import matplotlib
matplotlib.use('Agg')

### @ 用Keras的API建立一个神经网络模型

# 2. Build a neural network from the 1st layer to the last layer.
# A single Dense layer (1 input, 1 output) fits y = w*x + b.
# NOTE: this layer was missing from the pasted snippet; the transcript
# warning "Dense(units=1, input_dim=1)" shows it was in the original script,
# and model.layers[0].get_weights() below requires it.
model = Sequential()
model.add(Dense(units=1, input_dim=1))

# 3. Choose loss function and optimizing method:
# mean squared error minimized by stochastic gradient descent.
model.compile(loss='mse', optimizer='sgd')

• mean_squared_error
• mean_absolute_error
• mean_absolute_percentage_error
• mean_squared_logarithmic_error
• squared_hinge
• hinge
• logcosh
• categorical_crossentropy
• sparse_categorical_crossentropy
• binary_crossentropy
• kullback_leibler_divergence
• poisson
• cosine_proximity

https://keras.io/losses/

https://github.com/fchollet/keras/blob/master/keras/losses.py

• RMSprop
• TFOptimizer

### @ 分批次训练

# 4. Training: 1400 mini-batch updates on the full training set,
# logging the loss every 100 steps. (Indentation of the loop body was
# lost in the paste and is restored here.)
print("Training......")
for step in range(1400):
    cost = model.train_on_batch(X_train, Y_train)
    if step % 100 == 0:
        print('train cost', cost)

### @ 测试数据测试训练结果。

biases和2越接近说明效果越好。
# 5. Evaluate on the 40 held-out samples and inspect the fitted
# parameters; the weight should be near 0.5 and the bias near 2.
print("\n Testing...........")
test_cost = model.evaluate(X_test, Y_test, batch_size=40)
print("Test cost:", test_cost)
weights, biases = model.layers[0].get_weights()
print('Weight=', weights, "\nbiases=", biases)

### @ Keras模型结果 VS 原始测试数据结果

# 6. Plot the model's predicted line over the raw test points.
predictions = model.predict(X_test)
plt.scatter(X_test, Y_test)
plt.plot(X_test, predictions)
plt.show()

#### Keras中几个重要函数用法

2017-04-28 09:13:41

#### 基于Theano的深度学习(Deep Learning)框架Keras学习随笔-08-规则化(规格化)

2015-10-18 15:35:03

#### DeepLearning tutorial（6）易用的深度学习框架Keras简介

2015-05-07 13:40:06

#### Keras学习笔记（一）

2017-05-23 11:45:41

#### 易用的深度学习框架Keras简介及使用

2015-12-20 16:21:20

#### Keras.layers.core.dense()方法详解

2017-10-19 10:08:52

#### 基于Theano的深度学习(Deep Learning)框架Keras学习随笔-02-Example

2015-10-11 21:47:49

#### 深度学习（十）keras学习笔记

2015-10-13 10:38:19

#### 深度学习框架-Keras基础入门系列

2017年09月27日 22:11

#### 深度学习框架之Keras入门教程

2017-12-27 22:20:39