Getting Started with TensorFlow (谭秉峰) (4): Training a Simple Linear Model in TensorFlow

import tensorflow as tf
import numpy as np

'''
numpy.random.rand(d0, d1, ..., dn)
rand draws samples uniformly from [0, 1) -- 0 included, 1 excluded --
in the given shape; each argument specifies the size of one dimension.
The return value is an array of the specified shape. For example:

np.random.rand(4, 2)

array([[ 0.02173903,  0.44376568],
       [ 0.25309942,  0.85259262],
       [ 0.56465709,  0.95135013],
       [ 0.14145746,  0.55389458]])
'''
# Generate 100 random points with numpy
x_data = np.random.rand(100)
# y_data serves as the label (ground-truth) values
y_data = x_data * 0.1 + 0.2

# Build a linear model; k and b start away from the true values 0.1 and 0.2
b = tf.Variable(1.1)
k = tf.Variable(0.5)
y = x_data * k + b

# Define the cost function
# tf.reduce_mean() averages the squared error over all samples
loss = tf.reduce_mean(tf.square(y - y_data))
# Define a gradient-descent optimizer with learning rate 0.2
optimizer = tf.train.GradientDescentOptimizer(0.2)
# Training op that minimizes the cost function
train = optimizer.minimize(loss)
# Op that initializes the variables
init = tf.global_variables_initializer()

with tf.Session() as sess:
    # Initialize the variables
    sess.run(init)
    for step in range(201):
        # Run one training step
        sess.run(train)
        # Print the current k and b at every step
        print(step, sess.run([k, b]))
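
The script above uses the TensorFlow 1.x API; tf.Session, tf.train.GradientDescentOptimizer, and tf.global_variables_initializer were all removed in 2.x. As a minimal sketch, assuming a TensorFlow 2.x environment (not part of the original post), the same fit can be written with tf.GradientTape and tf.keras.optimizers.SGD:

import numpy as np
import tensorflow as tf  # assumes TensorFlow 2.x

x_data = np.random.rand(100).astype(np.float32)
y_data = x_data * 0.1 + 0.2

b = tf.Variable(1.1)
k = tf.Variable(0.5)
opt = tf.keras.optimizers.SGD(learning_rate=0.2)

for step in range(201):
    with tf.GradientTape() as tape:
        y = x_data * k + b                            # forward pass
        loss = tf.reduce_mean(tf.square(y - y_data))  # mean squared error
    # Compute d(loss)/dk and d(loss)/db, then apply one SGD update
    grads = tape.gradient(loss, [k, b])
    opt.apply_gradients(zip(grads, [k, b]))
    print(step, k.numpy(), b.numpy())

Eager execution replaces the session: the loss is recomputed inside the tape on every iteration, so no graph or init op is needed.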
    

Output:

0 [0.27192336, 0.6618631]
1 [0.15923649, 0.4435339]
2 [0.10392349, 0.33454895]
3 [0.07712769, 0.27996296]
4 [0.06449628, 0.2524457]
5 [0.05889072, 0.2384028]
6 [0.056760818, 0.23107207]
7 [0.05634124, 0.22708969]
8 [0.05675441, 0.22478221]
9 [0.05756434, 0.22331703]
10 [0.05855465, 0.2222797]
11 [0.05961819, 0.22146386]
12 [0.060702123, 0.22076659]
13 [0.061780624, 0.2201365]
14 [0.06284128, 0.21954776]
15 [0.06387834, 0.21898733]
16 [0.06488937, 0.21844849]
17 [0.06587354, 0.2179277]
18 [0.06683084, 0.21742296]
19 [0.06776165, 0.21693312]
20 [0.06866652, 0.21645738]
21 [0.06954607, 0.21599519]
22 [0.07040098, 0.21554606]
23 [0.07123192, 0.21510957]
24 [0.07203953, 0.21468537]
25 [0.07282448, 0.21427308]
26 [0.073587395, 0.21387237]
27 [0.0743289, 0.21348293]
28 [0.07504958, 0.21310441]
29 [0.07575003, 0.21273652]
30 [0.07643082, 0.21237896]
31 [0.07709249, 0.21203144]
32 [0.07773559, 0.21169367]
33 [0.07836063, 0.21136539]
34 [0.07896813, 0.21104632]
35 [0.07955857, 0.21073622]
36 [0.08013244, 0.21043481]
37 [0.0806902, 0.21014187]
38 [0.081232294, 0.20985714]
39 [0.08175918, 0.2095804]
40 [0.08227126, 0.20931146]
41 [0.08276898, 0.20905004]
42 [0.08325271, 0.20879598]
43 [0.083722875, 0.20854904]
44 [0.08417983, 0.20830904]
45 [0.08462396, 0.20807578]
46 [0.08505563, 0.20784906]
47 [0.08547517, 0.2076287]
48 [0.08588294, 0.20741454]
49 [0.08627926, 0.20720638]
50 [0.08666445, 0.20700407]
51 [0.08703883, 0.20680743]
52 [0.0874027, 0.20661633]
53 [0.08775636, 0.20643058]
54 [0.08810008, 0.20625006]
55 [0.08843416, 0.2060746]
56 [0.088758856, 0.20590405]
57 [0.08907444, 0.2057383]
58 [0.08938116, 0.20557721]
59 [0.08967927, 0.20542063]
60 [0.08996902, 0.20526846]
61 [0.09025062, 0.20512055]
62 [0.09052432, 0.2049768]
63 [0.09079034, 0.20483708]
64 [0.09104889, 0.20470129]
65 [0.09130018, 0.20456931]
66 [0.09154442, 0.20444103]
67 [0.091781795, 0.20431635]
68 [0.09201251, 0.20419517]
69 [0.09223675, 0.2040774]
70 [0.092454694, 0.20396292]
71 [0.09266652, 0.20385167]
72 [0.0928724, 0.20374355]
73 [0.093072504, 0.20363845]
74 [0.09326699, 0.2035363]
75 [0.09345601, 0.20343703]
76 [0.09363972, 0.20334053]
77 [0.09381828, 0.20324676]
78 [0.093991816, 0.2031556]
79 [0.09416049, 0.20306702]
80 [0.094324425, 0.20298092]
81 [0.094483756, 0.20289724]
82 [0.094638616, 0.20281589]
83 [0.09478913, 0.20273684]
84 [0.094935425, 0.20266001]
85 [0.095077604, 0.20258532]
86 [0.0952158, 0.20251274]
87 [0.09535011, 0.2024422]
88 [0.09548065, 0.20237364]
89 [0.09560753, 0.202307]
90 [0.09573084, 0.20224224]
91 [0.09585069, 0.2021793]
92 [0.09596717, 0.20211811]
93 [0.09608039, 0.20205864]
94 [0.09619043, 0.20200086]
95 [0.096297376, 0.20194468]
96 [0.09640133, 0.20189008]
97 [0.096502356, 0.20183702]
98 [0.09660055, 0.20178545]
99 [0.09669598, 0.20173532]
100 [0.09678874, 0.2016866]
101 [0.09687889, 0.20163926]
102 [0.09696651, 0.20159325]
103 [0.09705167, 0.20154852]
104 [0.09713444, 0.20150505]
105 [0.097214885, 0.20146279]
106 [0.09729307, 0.20142172]
107 [0.09736907, 0.20138182]
108 [0.097442925, 0.20134303]
109 [0.09751471, 0.20130533]
110 [0.09758448, 0.20126867]
111 [0.097652294, 0.20123306]
112 [0.0977182, 0.20119844]
113 [0.09778226, 0.2011648]
114 [0.09784452, 0.20113209]
115 [0.09790503, 0.2011003]
116 [0.09796385, 0.20106941]
117 [0.09802101, 0.20103939]
118 [0.09807657, 0.20101021]
119 [0.09813057, 0.20098186]
120 [0.09818305, 0.20095429]
121 [0.09823406, 0.2009275]
122 [0.09828363, 0.20090146]
123 [0.09833182, 0.20087616]
124 [0.09837865, 0.20085156]
125 [0.09842417, 0.20082766]
126 [0.09846841, 0.20080443]
127 [0.098511405, 0.20078184]
128 [0.098553196, 0.20075989]
129 [0.09859381, 0.20073855]
130 [0.09863329, 0.20071782]
131 [0.09867166, 0.20069768]
132 [0.09870895, 0.20067808]
133 [0.0987452, 0.20065905]
134 [0.09878042, 0.20064054]
135 [0.09881466, 0.20062256]
136 [0.09884793, 0.20060508]
137 [0.098880276, 0.20058809]
138 [0.09891171, 0.20057158]
139 [0.098942265, 0.20055553]
140 [0.09897196, 0.20053995]
141 [0.09900083, 0.20052479]
142 [0.09902888, 0.20051005]
143 [0.09905614, 0.20049573]
144 [0.099082634, 0.20048182]
145 [0.09910839, 0.20046829]
146 [0.099133424, 0.20045514]
147 [0.09915775, 0.20044236]
148 [0.0991814, 0.20042995]
149 [0.09920438, 0.20041788]
150 [0.09922671, 0.20040615]
151 [0.099248424, 0.20039475]
152 [0.099269524, 0.20038366]
153 [0.09929003, 0.20037289]
154 [0.09930996, 0.20036241]
155 [0.09932933, 0.20035224]
156 [0.09934816, 0.20034236]
157 [0.099366456, 0.20033275]
158 [0.09938424, 0.2003234]
159 [0.099401526, 0.20031433]
160 [0.09941833, 0.2003055]
161 [0.09943466, 0.20029692]
162 [0.09945053, 0.2002886]
163 [0.09946595, 0.20028049]
164 [0.09948094, 0.20027262]
165 [0.099495515, 0.20026496]
166 [0.09950968, 0.20025752]
167 [0.09952345, 0.2002503]
168 [0.09953683, 0.20024326]
169 [0.09954983, 0.20023644]
170 [0.099562466, 0.2002298]
171 [0.09957475, 0.20022334]
172 [0.09958669, 0.20021707]
173 [0.099598296, 0.20021097]
174 [0.099609576, 0.20020506]
175 [0.099620536, 0.2001993]
176 [0.09963119, 0.2001937]
177 [0.09964155, 0.20018826]
178 [0.09965161, 0.20018297]
179 [0.099661395, 0.20017783]
180 [0.0996709, 0.20017284]
181 [0.09968014, 0.20016798]
182 [0.099689126, 0.20016328]
183 [0.09969785, 0.20015869]
184 [0.09970634, 0.20015423]
185 [0.099714585, 0.20014991]
186 [0.099722594, 0.20014569]
187 [0.09973038, 0.20014161]
188 [0.09973795, 0.20013763]
189 [0.0997453, 0.20013377]
190 [0.099752456, 0.20013002]
191 [0.09975941, 0.20012636]
192 [0.09976616, 0.20012282]
193 [0.09977272, 0.20011938]
194 [0.0997791, 0.20011602]
195 [0.0997853, 0.20011276]
196 [0.099791326, 0.2001096]
197 [0.09979718, 0.20010652]
198 [0.099802874, 0.20010352]
199 [0.09980841, 0.20010062]
200 [0.09981379, 0.2000978]
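
After 200 steps, k ≈ 0.0998 and b ≈ 0.2001, i.e. the model has converged to the true parameters k = 0.1 and b = 0.2 used to generate y_data. Since the data contain no noise, the exact least-squares solution is that line; as an optional sanity check (not in the original post, assuming only numpy), np.polyfit recovers it in closed form:

import numpy as np

x_data = np.random.rand(100)
y_data = x_data * 0.1 + 0.2

# Degree-1 least-squares fit; returns (slope, intercept)
k_fit, b_fit = np.polyfit(x_data, y_data, 1)
print(k_fit, b_fit)  # ~0.1 and ~0.2, up to floating-point error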