TensorFlow Advanced Tensor Operations

Merge and Split

tf.concat  # concatenate along an existing axis
tf.split  # split along an axis into pieces
tf.stack  # stack along a new axis
tf.unstack  # unstack along an axis into a list of tensors (that axis is removed)

a = tf.ones([4,35,8])
b = tf.ones([1,35,8])
tf.concat([a,b], axis=0).shape
TensorShape([5, 35, 8])
a = tf.ones([4,35,8])
b = tf.ones([4,35,8])
tf.stack([a,b],axis=0).shape
TensorShape([2, 4, 35, 8])
tf.stack([a,b],axis=2).shape
TensorShape([4, 35, 2, 8])
c = tf.stack([a,b],axis=2)
a, b = tf.unstack(c, axis=2)
res = tf.unstack(c, axis=3)
res[0].shape
TensorShape([4, 35, 2])
len(res)
8
res = tf.split(c, axis=3, num_or_size_splits=2)
len(res)
2
res = tf.split(c, axis=3, num_or_size_splits=[2,2,4])
len(res)
3

Data Statistics
tf.norm(a, ord=, axis=)  # norm; the default is the L2 norm (square root of the sum of squares)
tf.reduce_min/max
tf.argmax/argmin  # position of the max/min (a sketch follows after the examples in this section)
tf.equal
tf.unique

a = tf.ones([2,2])
tf.norm(a)
<tf.Tensor: shape=(), dtype=float32, numpy=2.0>
tf.sqrt(tf.reduce_sum(tf.square(a)))
<tf.Tensor: shape=(), dtype=float32, numpy=2.0>
tf.norm(a,ord=1,axis=0)
<tf.Tensor: shape=(2,), dtype=float32, numpy=array([2., 2.], dtype=float32)>
a = tf.random.normal([4, 10])  # a is reassigned here (shape inferred from the outputs below)
tf.reduce_sum(a),tf.reduce_mean(a),tf.reduce_max(a),tf.reduce_min(a)
(<tf.Tensor: shape=(), dtype=float32, numpy=0.13516426>, <tf.Tensor: shape=(), dtype=float32, numpy=0.0033791065>, <tf.Tensor: shape=(), dtype=float32, numpy=1.8307408>, <tf.Tensor: shape=(), dtype=float32, numpy=-2.484963>)
tf.reduce_sum(a,axis=1),tf.reduce_mean(a,axis=1),tf.reduce_max(a,axis=1),tf.reduce_min(a,axis=1)
(<tf.Tensor: shape=(4,), dtype=float32, numpy=array([ 3.6115484, -3.078033 , -3.283598 ,  2.8852468], dtype=float32)>, <tf.Tensor: shape=(4,), dtype=float32, numpy=array([ 0.36115485, -0.3078033 , -0.32835978,  0.2885247 ], dtype=float32)>, <tf.Tensor: shape=(4,), dtype=float32, numpy=array([1.681464 , 0.5672009, 1.5490466, 1.8307408], dtype=float32)>, <tf.Tensor: shape=(4,), dtype=float32, numpy=array([-1.3126731, -1.3005722, -1.4720294, -2.484963 ], dtype=float32)>)
a=tf.constant([0,1,2,3,5,6])
b=tf.range(6)
tf.equal(a,b)
<tf.Tensor: shape=(6,), dtype=bool, numpy=array([ True,  True,  True,  True, False, False])>
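A common use of tf.equal is counting how many predictions match the labels; a minimal sketch with hypothetical pred and y:
pred = tf.constant([2, 1, 0, 2])  # hypothetical predicted classes
y = tf.constant([2, 0, 0, 2])  # hypothetical ground-truth labels
correct = tf.reduce_sum(tf.cast(tf.equal(pred, y), tf.int32))  # 3 matches
accuracy = correct / pred.shape[0]  # 0.75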
a = tf.range(5)
tf.unique(a)
Unique(y=<tf.Tensor: shape=(5,), dtype=int32, numpy=array([0, 1, 2, 3, 4])>, idx=<tf.Tensor: shape=(5,), dtype=int32, numpy=array([0, 1, 2, 3, 4])>)
b = tf.constant([4,2,5,4])
tf.unique(b)
Unique(y=<tf.Tensor: shape=(3,), dtype=int32, numpy=array([4, 2, 5])>, idx=<tf.Tensor: shape=(4,), dtype=int32, numpy=array([0, 1, 2, 0])>)
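tf.argmax / tf.argmin from the list above are not demonstrated here, so a minimal sketch with hypothetical values:
a = tf.constant([[1., 5., 2.],
                 [4., 0., 3.]])
tf.argmax(a, axis=0)  # [1, 0, 1], index of the max in each column (axis=0 is the default)
tf.argmax(a, axis=1)  # [1, 0], index of the max in each row
tf.argmin(a, axis=1)  # [0, 1], index of the min in each row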

Sorting
tf.sort / tf.argsort
tf.math.top_k

a = tf.random.shuffle(tf.range(5))  # shuffle the elements
a
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([3, 4, 1, 0, 2])>
tf.sort(a,direction="DESCENDING")
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([4, 3, 2, 1, 0])>
tf.sort(a,direction="ASCENDING")
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([0, 1, 2, 3, 4])>
tf.argsort(a,direction="DESCENDING")
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([1, 0, 4, 2, 3])>
idx=tf.argsort(a,direction="DESCENDING")
tf.gather(a,idx)
<tf.Tensor: shape=(5,), dtype=int32, numpy=array([4, 3, 2, 1, 0])>
a=tf.random.uniform([3,3],maxval=10,dtype=tf.int32)
a
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[4, 2, 5],
       [4, 0, 7],
       [3, 6, 3]])>
tf.sort(a)
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[2, 4, 5],
       [0, 4, 7],
       [3, 3, 6]])>
tf.sort(a,direction='DESCENDING')
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[5, 4, 2],
       [7, 4, 0],
       [6, 3, 3]])>
tf.argsort(a,direction='DESCENDING')
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[2, 0, 1],
       [2, 0, 1],
       [1, 0, 2]])>
res = tf.math.top_k(a, 2)
res.indices  # indices of the top-k values
<tf.Tensor: shape=(3, 2), dtype=int32, numpy=
array([[2, 0],
       [2, 0],
       [1, 0]])>
res.values  # the top-k values
<tf.Tensor: shape=(3, 2), dtype=int32, numpy=
array([[5, 4],
       [7, 4],
       [6, 3]])>

Padding and Tiling

tf.pad(a, [[pad before dim 0, pad after dim 0], [pad before dim 1, pad after dim 1], …])
tf.tile  # replicate the tensor along each axis

a=tf.reshape(tf.range(9),[3,3])
a
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[0, 1, 2],
       [3, 4, 5],
       [6, 7, 8]])>
tf.pad(a,[[0,0],[0,0]])
<tf.Tensor: shape=(3, 3), dtype=int32, numpy=
array([[0, 1, 2],
       [3, 4, 5],
       [6, 7, 8]])>
tf.pad(a,[[1,0],[0,0]])
<tf.Tensor: shape=(4, 3), dtype=int32, numpy=
array([[0, 0, 0],
       [0, 1, 2],
       [3, 4, 5],
       [6, 7, 8]])>
tf.pad(a,[[1,1],[0,0]])
<tf.Tensor: shape=(5, 3), dtype=int32, numpy=
array([[0, 0, 0],
       [0, 1, 2],
       [3, 4, 5],
       [6, 7, 8],
       [0, 0, 0]])>
tf.pad(a,[[1,1],[0,1]])
<tf.Tensor: shape=(5, 4), dtype=int32, numpy=
array([[0, 0, 0, 0],
       [0, 1, 2, 0],
       [3, 4, 5, 0],
       [6, 7, 8, 0],
       [0, 0, 0, 0]])>
a=tf.random.normal([4,28,28,3])
tf.pad(a,[[0,0],[2,2],[2,2],[0,0]]).shape
TensorShape([4, 32, 32, 3])
a2=tf.random.normal([1,3,4])
a2.shape
TensorShape([1, 3, 4])
a2=tf.tile(a2,[2,1,1])
a2.shape
TensorShape([2, 3, 4])
a2=tf.tile(a2,[2,2,1])
a2.shape
TensorShape([4, 6, 4])

Tensor Clipping

tf.clip_by_value
tf.nn.relu  # values below 0 become 0; values above 0 are kept as-is
tf.clip_by_norm  # rescale so the norm does not exceed the given value
tf.clip_by_global_norm  # rescale a list of tensors proportionally by their combined (global) norm (a sketch follows after the clip_by_norm example)

a= tf.range(9)
a
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([0, 1, 2, 3, 4, 5, 6, 7, 8])>
tf.maximum(a,2)
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([2, 2, 2, 3, 4, 5, 6, 7, 8])>
tf.minimum(a,6)
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([0, 1, 2, 3, 4, 5, 6, 6, 6])>
tf.clip_by_value(a,2,6)
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([2, 2, 2, 3, 4, 5, 6, 6, 6])>
a=a-5
a
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([-5, -4, -3, -2, -1, 0, 1, 2, 3])>
tf.nn.relu(a)
<tf.Tensor: shape=(9,), dtype=int32, numpy=array([0, 0, 0, 0, 0, 0, 1, 2, 3])>
a=tf.random.normal([2,2],mean=10)
a
<tf.Tensor: shape=(2, 2), dtype=float32, numpy=
array([[ 9.141302 ,  9.6661625],
       [10.096088 ,  9.558105 ]], dtype=float32)>
tf.norm(a)
<tf.Tensor: shape=(), dtype=float32, numpy=19.24283>
aa=tf.clip_by_norm(a,15)
tf.norm(aa)
<tf.Tensor: shape=(), dtype=float32, numpy=14.999999>
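tf.clip_by_global_norm from the list above is not shown in the outputs; a minimal sketch, assuming a hypothetical list of gradient tensors:
grads = [tf.random.normal([2, 2]), tf.random.normal([3])]  # hypothetical gradient list
clipped, global_norm = tf.clip_by_global_norm(grads, clip_norm=5.0)
# every tensor in clipped is scaled by the same factor, so the combined (global)
# norm is at most 5 while the relative directions of the gradients are preserved
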
tf.where

a =tf.random.normal([3,3])
a
<tf.Tensor: shape=(3, 3), dtype=float32, numpy=
array([[ 1.952879  , -0.06720538,  1.1356668 ],
       [-0.69901603,  0.41394553, -0.549109  ],
       [-1.386512  , -1.9713637 ,  0.35445404]], dtype=float32)>
mask=a>0
mask
<tf.Tensor: shape=(3, 3), dtype=bool, numpy=
array([[ True, False,  True],
       [False,  True, False],
       [False, False,  True]])>
tf.boolean_mask(a,mask)
<tf.Tensor: shape=(4,), dtype=float32, numpy=array([1.952879  , 1.1356668 , 0.41394553, 0.35445404], dtype=float32)>
indices = tf.where(mask)
indices
<tf.Tensor: shape=(4, 2), dtype=int64, numpy=
array([[0, 0],
       [0, 2],
       [1, 1],
       [2, 2]], dtype=int64)>
tf.gather_nd(a,indices)
<tf.Tensor: shape=(4,), dtype=float32, numpy=array([1.952879  , 1.1356668 , 0.41394553, 0.35445404], dtype=float32)>
# select elements with a boolean condition
mask
<tf.Tensor: shape=(3, 3), dtype=bool, numpy=
array([[ True, False,  True],
       [False,  True, False],
       [False, False,  True]])>
a =tf.ones([3,3])
b=tf.zeros([3,3])
tf.where(mask,a,b)
<tf.Tensor: shape=(3, 3), dtype=float32, numpy=
array([[1., 0., 1.],
       [0., 1., 0.],
       [0., 0., 1.]], dtype=float32)>

tf.scatter_nd (update values at the given indices)
indices = tf.constant([[4],[3],[1],[7]])
updates =tf.constant([9,10,11,12])
shape=tf.constant([8])
tf.scatter_nd(indices,updates,shape)
<tf.Tensor: shape=(8,), dtype=int32, numpy=array([ 0, 11, 0, 10, 9, 0, 0, 12])>
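tf.scatter_nd also accepts higher-rank updates; a minimal sketch with hypothetical values, writing whole rows into a 4x4 zero tensor:
indices = tf.constant([[0], [2]])  # row indices to write into
updates = tf.constant([[5, 5, 5, 5],
                       [7, 7, 7, 7]])  # one row of values per index
tf.scatter_nd(indices, updates, shape=tf.constant([4, 4]))
# rows 0 and 2 receive the updates; every other entry stays 0
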
tf.meshgrid

x=tf.linspace(-2.,2,5)
x.shape
TensorShape([5])
y=tf.linspace(-2.,2,5)
y
<tf.Tensor: shape=(5,), dtype=float32, numpy=array([-2., -1.,  0.,  1.,  2.], dtype=float32)>
px,py=tf.meshgrid(x,y)
px.shape
TensorShape([5, 5])
px
<tf.Tensor: shape=(5, 5), dtype=float32, numpy=
array([[-2., -1.,  0.,  1.,  2.],
       [-2., -1.,  0.,  1.,  2.],
       [-2., -1.,  0.,  1.,  2.],
       [-2., -1.,  0.,  1.,  2.],
       [-2., -1.,  0.,  1.,  2.]], dtype=float32)>
py
<tf.Tensor: shape=(5, 5), dtype=float32, numpy=
array([[-2., -2., -2., -2., -2.],
       [-1., -1., -1., -1., -1.],
       [ 0.,  0.,  0.,  0.,  0.],
       [ 1.,  1.,  1.,  1.,  1.],
       [ 2.,  2.,  2.,  2.,  2.]], dtype=float32)>
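A typical use of the grid above is evaluating a function at every (x, y) coordinate; a minimal sketch, assuming z = sin(x) + sin(y):
points = tf.stack([px, py], axis=2)  # shape [5, 5, 2], one (x, y) pair per grid cell
z = tf.sin(px) + tf.sin(py)  # shape [5, 5], the function value at every grid point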