TensorFlow 的 API 一直繁多，tf.contrib.layers.fully_connected
和 tf.contrib.layers.linear
就是一个容易让人迷惑的点：fully_connected
相当于带默认激活函数（ReLU）的 linear，也就是说 linear 等价于 fully_connected(..., activation_fn=None)。下面用同一份输入验证这一点：
# Demo: tf.contrib.layers.fully_connected vs tf.contrib.layers.linear.
# fully_connected defaults to activation_fn=tf.nn.relu; linear is the same
# layer with no activation. Using the same xavier_initializer seed makes
# every layer start from identical weights, so the outputs are comparable.
import tensorflow as tf

# 2x3 input filled with ones, shared by all three layers.
x = tf.get_variable('x', shape=[2, 3], initializer=tf.constant_initializer(1))

# Default activation_fn=tf.nn.relu: negative pre-activations become 0.
y1 = tf.contrib.layers.fully_connected(
    x, 2, weights_initializer=tf.contrib.layers.xavier_initializer(seed=1))
# No activation at all.
y2 = tf.contrib.layers.linear(
    x, 2, weights_initializer=tf.contrib.layers.xavier_initializer(seed=1))
# Explicitly disabling the activation makes fully_connected behave like linear,
# so y3 should equal y2.
y3 = tf.contrib.layers.fully_connected(
    x, 2, weights_initializer=tf.contrib.layers.xavier_initializer(seed=1),
    activation_fn=None)

with tf.Session() as sess:
    # BUGFIX: these four statements must be indented inside the `with` block;
    # the original snippet raised IndentationError as written.
    sess.run(tf.global_variables_initializer())
    print(sess.run(y1))
    print(sess.run(y2))
    print(sess.run(y3))
结果如下。由于三层使用了相同的随机种子，权重完全一致，因此 y2（linear）和 y3（activation_fn=None 的 fully_connected）输出完全相同；而 y1 的线性输出为负数，经 ReLU 后全部被置为 0：
[[-0. -0.]
[-0. -0.]]
[[-0.8214258 -0.12572539]
[-0.8214258 -0.12572539]]
[[-0.8214258 -0.12572539]
[-0.8214258 -0.12572539]]