目录
- tf.argmax()
- tf.reduce_mean()
tf.argmax() 自带降低一维:被求 argmax 的那一维会从结果形状中消去
# Demo: tf.argmax over the last axis of a (2, 3, 4, 1) tensor.
# argmax removes the reduced axis, so the static result shape is (2, 3, 4).
import tensorflow as tf
import numpy as np

x = np.arange(24).reshape((2,3,4,1))
# Axis 3 has length 1, so the only (and therefore largest) index is 0 everywhere.
z = tf.argmax(x,axis = 3)
print(z.get_shape())  # (2, 3, 4)
with tf.Session() as sess:
    # NOTE: the pasted original lost this indentation, making it a SyntaxError.
    print(sess.run(z))
(2, 3, 4)
[[[0 0 0 0]
[0 0 0 0]
[0 0 0 0]]
[[0 0 0 0]
[0 0 0 0]
[0 0 0 0]]]
# Reduce axis 2 (length 4) instead. x holds ascending values, so the largest
# entry along that axis is always the last one: every argmax index is 3,
# and the result shape is (2, 3, 1).
z = tf.argmax(x,axis = 2)
(2, 3, 1)
[[[3]
[3]
[3]]
[[3]
[3]
[3]]]
# Reduce axis 1 (length 3): values grow along this axis too, so argmax is
# always the last index, 2; the result shape is (2, 4, 1).
z = tf.argmax(x,axis = 1)
(2, 4, 1)
[[[2]
[2]
[2]
[2]]
[[2]
[2]
[2]
[2]]]
# Reduce axis 0 (length 2): the second slab (values 12..23) dominates the
# first (0..11), so every argmax index is 1; the result shape is (3, 4, 1).
z = tf.argmax(x,axis = 0)
(3, 4, 1)
[[[1]
[1]
[1]
[1]]
[[1]
[1]
[1]
[1]]
[[1]
[1]
[1]
[1]]]
tf.reduce_mean()
# Demo: tf.reduce_mean over axes 1 and 2 with keepdims=True.
# Input shape (2, 2, 2) -> output shape (2, 1, 1): each 2x2 plane
# collapses to its single mean value (1.5 and 5.5 for arange(8)).
import tensorflow as tf
import numpy as np

a = np.arange(8.0).reshape(2,2,2)
b = tf.reduce_mean(a,axis=[1,2],keepdims=True)
print(b.get_shape())  # (2, 1, 1)
with tf.Session() as sess:
    # NOTE: the pasted original lost the indentation of these three lines.
    b=sess.run(b)
    print(a)
    print(b)
(2, 1, 1)
[[[0. 1.]
[2. 3.]]
[[4. 5.]
[6. 7.]]]
[[[1.5]]
[[5.5]]]
axis=0垂直屏幕。axis=1纵向,axis=2横向(计算机的传统)。axis=[1,2]就是垂直屏幕的平面各自取平均值了,可以分解为axis=1取平均,然后axis=2取平均。
# Demo: tf.reduce_mean over axis 1 only, keepdims=True.
# Input shape (2, 2, 2) -> output shape (2, 1, 2): the two rows of each
# 2x2 plane are averaged element-wise ([1. 2.] and [5. 6.] here).
import tensorflow as tf
import numpy as np

a = np.arange(8.0).reshape(2,2,2)
b = tf.reduce_mean(a,axis= 1,keepdims=True)
print(b.get_shape())  # (2, 1, 2)
with tf.Session() as sess:
    # NOTE: the pasted original lost the indentation of these three lines.
    b=sess.run(b)
    print(a)
    print(b)
(2, 1, 2)
[[[0. 1.]
[2. 3.]]
[[4. 5.]
[6. 7.]]]
[[[1. 2.]]
[[5. 6.]]]
# Spatial average of an image-like batch: collapse the height and width
# axes (1 and 2) while keeping them as size-1 dims, yielding one mean
# per image per channel — static shape (8, 1, 1, 3).
import tensorflow as tf
import numpy as np

images = np.ones((8, 256, 256, 3))
per_channel_mean = tf.reduce_mean(images, axis=[1, 2], keepdims=True)
print(per_channel_mean.get_shape())  # (8, 1, 1, 3)
(8, 1, 1, 3)
# Averaging across the batch axis (0) without keepdims: the reduced
# dimension disappears entirely, leaving one (256, 256, 3) mean image.
import tensorflow as tf
import numpy as np

batch = np.ones((8, 256, 256, 3))
mean_image = tf.reduce_mean(batch, axis=0)
print(mean_image.get_shape())  # (256, 256, 3)
(256, 256, 3)
# Averaging over the batch (0) and channel (3) axes with keepdims=True:
# both reduced axes survive as size-1 dims, so the static shape is
# (1, 256, 256, 1) — one mean value per spatial position.
import tensorflow as tf
import numpy as np

batch = np.ones((8, 256, 256, 3))
spatial_map = tf.reduce_mean(batch, axis=[0, 3], keepdims=True)
print(spatial_map.get_shape())  # (1, 256, 256, 1)
(1, 256, 256, 1)
更多数学原理小文请关注公众号:未名方略