(8) TensorFlow Merging and Splitting

Merging and Splitting

Function              API
Merge: concatenate    tf.concat(tensors, axis)
Merge: stack          tf.stack(tensors, axis)
Split                 tf.split(x, num_or_size_splits, axis)
Split: unstack        tf.unstack(x, axis)
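Before looking at each API in detail, here is a minimal sketch (the shapes are chosen only for illustration) showing all four calls and the shapes they produce:

import tensorflow as tf

a = tf.zeros([4, 5, 3])
b = tf.zeros([6, 5, 3])

# concat joins along an existing axis; all other axes must match
print(tf.concat([a, b], axis=0).shape)        # (10, 5, 3)

# stack inserts a new axis; all inputs must have identical shapes
c = tf.zeros([4, 5, 3])
print(tf.stack([a, c], axis=0).shape)         # (2, 4, 5, 3)

# split returns a list of tensors; the split axis is kept
parts = tf.split(a, num_or_size_splits=4, axis=0)
print(len(parts), parts[0].shape)             # 4 (1, 5, 3)

# unstack also returns a list, but the split axis is removed
pieces = tf.unstack(a, axis=0)
print(len(pieces), pieces[0].shape)           # 4 (5, 3)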

Merging

Concatenation

  • tf.concat(tensors, axis): tensors is the list of tensors to concatenate; axis specifies the dimension along which to concatenate.
  • Note: the lengths along all non-concatenation dimensions must be the same, otherwise an error is raised.
import tensorflow as tf
# a and b differ only along axis 0, so they can be concatenated there
a = tf.random.normal([4,5,3])
b = tf.random.normal([6,5,3])
c = tf.concat([a,b],axis=0)  # resulting shape: (10, 5, 3)
print(a)
print(b)
print(c)

out:

tf.Tensor(
[[[-0.39625445  0.3819762   1.2027841 ]
  [ 0.334098    0.7432584   0.6183441 ]
  [ 0.03417419  0.01321491  1.0078253 ]
  [ 1.4237499  -1.827257   -1.308018  ]
  [-0.35503232 -2.6376138   0.98315465]]

 [[-1.5407085   0.12017757 -0.59515864]
  [ 2.094839   -0.18196803  1.6049206 ]
  [-1.3181221   1.7851645  -1.5995227 ]
  [-0.04517302  1.6453824  -0.5989419 ]
  [ 2.2515132  -0.62372136 -0.06089867]]

 [[ 1.3461647  -1.385781   -0.48966554]
  [-0.41696066 -0.90037787  2.291636  ]
  [-1.7008727   1.7226149  -0.786153  ]
  [-1.2762736   0.332516   -0.14810249]
  [ 1.0592614   0.2533893   0.5841308 ]]

 [[-0.42902058  0.43225735  0.85556847]
  [-1.7830418  -1.3853345   0.890129  ]
  [-0.63319314 -1.2618272   0.44464034]
  [ 0.4910915   0.8201667  -1.3423263 ]
  [ 0.63449687 -1.190244   -1.057467  ]]], shape=(4, 5, 3), dtype=float32)
tf.Tensor(
[[[ 0.66030663  0.8512739  -0.8486435 ]
  [ 0.00603609 -0.09058952 -1.1434454 ]
  [-0.04409889 -2.0244348  -1.2657875 ]
  [-1.8814467   0.01734681  0.14282931]
  [ 0.4124618  -0.18742435  1.6374197 ]]

 [[-1.1297064  -1.1038694   1.0977235 ]
  [-1.6643121  -0.41950858 -1.3071313 ]
  [ 0.6732128   0.2648344  -0.71164423]
  [-2.4692547  -0.00817256 -0.9665499 ]
  [-0.7805751  -0.47890818 -1.087723  ]]

 [[ 0.7077118  -1.0909817   2.1622958 ]
  [ 1.4824893  -0.26602545  0.3897919 ]
  [-0.4684151   0.4919931  -0.03146958]
  [-0.08031331 -0.24798968 -2.4943614 ]
  [-0.5044665  -1.4761853  -0.17432967]]

 [[-0.01854223 -1.9119867  -0.40005997]
  [-1.56136    -0.18466255  1.7343273 ]
  [-1.6172893  -1.3799937  -0.36896563]
  [ 1.3052849   0.5668918   0.14236486]
  [ 0.02620716 -1.2010809  -2.4494224 ]]

 [[ 2.4470851  -1.3787079  -0.35644615]
  [ 1.7023886  -2.2372851  -0.88650274]
  [ 1.8084188   1.282095    0.67697704]
  [-0.21578562  0.08279765 -0.18176055]
  [ 0.5063544  -1.6944374  -1.7155305 ]]

 [[-0.24860623  0.31548622  0.16770023]
  [-0.59958047 -0.28930587  1.8078712 ]
  [-0.33258387  0.83143383 -0.36458698]
  [-0.87004584  2.4647048  -0.82852066]
  [-1.4280477   0.44560102 -1.2426772 ]]], shape=(6, 5, 3), dtype=float32)
tf.Tensor(
[[[-0.39625445  0.3819762   1.2027841 ]
  [ 0.334098    0.7432584   0.6183441 ]
  [ 0.03417419  0.01321491  1.0078253 ]
  [ 1.4237499  -1.827257   -1.308018  ]
  [-0.35503232 -2.6376138   0.98315465]]

 [[-1.5407085   0.12017757 -0.59515864]
  [ 2.094839   -0.18196803  1.6049206 ]
  [-1.3181221   1.7851645  -1.5995227 ]
  [-0.04517302  1.6453824  -0.5989419 ]
  [ 2.2515132  -0.62372136 -0.06089867]]

 [[ 1.3461647  -1.385781   -0.48966554]
  [-0.41696066 -0.90037787  2.291636  ]
  [-1.7008727   1.7226149  -0.786153  ]
  [-1.2762736   0.332516   -0.14810249]
  [ 1.0592614   0.2533893   0.5841308 ]]

 [[-0.42902058  0.43225735  0.85556847]
  [-1.7830418  -1.3853345   0.890129  ]
  [-0.63319314 -1.2618272   0.44464034]
  [ 0.4910915   0.8201667  -1.3423263 ]
  [ 0.63449687 -1.190244   -1.057467  ]]

 [[ 0.66030663  0.8512739  -0.8486435 ]
  [ 0.00603609 -0.09058952 -1.1434454 ]
  [-0.04409889 -2.0244348  -1.2657875 ]
  [-1.8814467   0.01734681  0.14282931]
  [ 0.4124618  -0.18742435  1.6374197 ]]

 [[-1.1297064  -1.1038694   1.0977235 ]
  [-1.6643121  -0.41950858 -1.3071313 ]
  [ 0.6732128   0.2648344  -0.71164423]
  [-2.4692547  -0.00817256 -0.9665499 ]
  [-0.7805751  -0.47890818 -1.087723  ]]

 [[ 0.7077118  -1.0909817   2.1622958 ]
  [ 1.4824893  -0.26602545  0.3897919 ]
  [-0.4684151   0.4919931  -0.03146958]
  [-0.08031331 -0.24798968 -2.4943614 ]
  [-0.5044665  -1.4761853  -0.17432967]]

 [[-0.01854223 -1.9119867  -0.40005997]
  [-1.56136    -0.18466255  1.7343273 ]
  [-1.6172893  -1.3799937  -0.36896563]
  [ 1.3052849   0.5668918   0.14236486]
  [ 0.02620716 -1.2010809  -2.4494224 ]]

 [[ 2.4470851  -1.3787079  -0.35644615]
  [ 1.7023886  -2.2372851  -0.88650274]
  [ 1.8084188   1.282095    0.67697704]
  [-0.21578562  0.08279765 -0.18176055]
  [ 0.5063544  -1.6944374  -1.7155305 ]]

 [[-0.24860623  0.31548622  0.16770023]
  [-0.59958047 -0.28930587  1.8078712 ]
  [-0.33258387  0.83143383 -0.36458698]
  [-0.87004584  2.4647048  -0.82852066]
  [-1.4280477   0.44560102 -1.2426772 ]]], shape=(10, 5, 3), dtype=float32)
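A further sketch (again with illustrative shapes): concatenating along axis=2 merges the last dimension, while any mismatch on a non-concatenation axis raises an error.

import tensorflow as tf

a = tf.random.normal([4, 5, 3])
b = tf.random.normal([4, 5, 2])

# only axis 2 differs between a and b, so concatenating there works
c = tf.concat([a, b], axis=2)
print(c.shape)  # (4, 5, 5)

# concatenating along axis 0 requires axes 1 and 2 to match;
# axis 2 differs (3 vs 2), so TensorFlow raises an error
try:
    tf.concat([a, b], axis=0)
except (ValueError, tf.errors.InvalidArgumentError):
    print("non-concatenation dimensions do not match")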


Stacking

  • tf.stack(tensors, axis): tensors are the tensors to stack, passed as a list.
  • axis specifies the position at which the new dimension is created for stacking.
  • Note: all tensors must have the same shape, otherwise an error is raised.
  • A new dimension axis is created to index the stacked tensors.
import tensorflow as tf
# a and b have identical shapes, so they can be stacked
a = tf.random.normal([5,3])
b = tf.random.normal([5,3])
c = tf.stack([a,b],axis=0)  # a new leading axis is created: shape (2, 5, 3)
print(a)
print(b)
print(c)

out:

tf.Tensor(
[[ 1.3300418  -0.4461302   0.20549034]
 [ 0.9952209  -1.7955463   0.3990858 ]
 [ 1.6510614  -0.21783236  3.460978  ]
 [ 1.3523902   0.5391921  -1.0611141 ]
 [-0.9559927   0.80054224  0.38005185]], shape=(5, 3), dtype=float32)
tf.Tensor(
[[ 0.41561583 -1.0130575  -0.10967913]
 [-2.0321953   1.1032525   0.08715498]
 [-0.6097394   1.0935271   0.73701453]
 [-0.5774029  -0.94618404  0.23608516]
 [-2.0131488   0.7908683   0.17947574]], shape=(5, 3), dtype=float32)
tf.Tensor(
[[[ 1.3300418  -0.4461302   0.20549034]
  [ 0.9952209  -1.7955463   0.3990858 ]
  [ 1.6510614  -0.21783236  3.460978  ]
  [ 1.3523902   0.5391921  -1.0611141 ]
  [-0.9559927   0.80054224  0.38005185]]

 [[ 0.41561583 -1.0130575  -0.10967913]
  [-2.0321953   1.1032525   0.08715498]
  [-0.6097394   1.0935271   0.73701453]
  [-0.5774029  -0.94618404  0.23608516]
  [-2.0131488   0.7908683   0.17947574]]], shape=(2, 5, 3), dtype=float32)
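The value of axis controls where the new dimension is inserted. A minimal sketch (shapes are illustrative), contrasted with tf.concat, which reuses an existing axis instead of adding one:

import tensorflow as tf

a = tf.random.normal([5, 3])
b = tf.random.normal([5, 3])

print(tf.stack([a, b], axis=0).shape)   # (2, 5, 3) - new leading axis
print(tf.stack([a, b], axis=1).shape)   # (5, 2, 3) - new middle axis
print(tf.stack([a, b], axis=-1).shape)  # (5, 3, 2) - new trailing axis

# concat along axis 0 grows the existing axis instead
print(tf.concat([a, b], axis=0).shape)  # (10, 3)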

Splitting

  • tf.split(x, num_or_size_splits, axis)
  • x is the tensor to split.
  • num_or_size_splits controls the split: a scalar splits the tensor into that many equal parts, while a list gives the length of each part. For example, [2,4,5,6] splits the tensor into 4 parts of lengths 2, 4, 5 and 6, where 2+4+5+6 must equal the length of the split dimension. In the code below, 1+3 equals 4 (the length of axis 0) and 2+3 equals 5 (the length of axis 1).
  • tf.unstack(x, axis) also splits, but the split dimension is removed from each resulting tensor.
  • axis is the index of the dimension along which to split.
import tensorflow as tf
a = tf.random.normal([4,5,3])
# scalar: split axis 0 into 4 equal parts of shape (1, 5, 3)
result = tf.split(a,num_or_size_splits=4,axis=0)
print(result)
print(result[1])
# list: split axis 0 into parts of length 1 and 3
result1 = tf.split(a,num_or_size_splits=[1,3],axis=0)
print(result1)
print(result1[1])
# list: split axis 1 into parts of length 2 and 3
result2 = tf.split(a,num_or_size_splits=[2,3],axis=1)
print(result2)
print(result2[1])

out:
[<tf.Tensor: id=8, shape=(1, 5, 3), dtype=float32, numpy=
array([[[ 0.3904892 , -1.105881  , -0.7082881 ],
        [ 0.4949667 ,  0.84043664, -0.68348324],
        [-0.22346756, -2.6907673 ,  0.17591892],
        [-0.7161378 ,  0.7036343 , -1.8668156 ],
        [ 0.36962324, -0.50048685, -0.24732415]]], dtype=float32)>, <tf.Tensor: id=9, shape=(1, 5, 3), dtype=float32, numpy=
array([[[-0.30191737, -0.0259735 ,  0.7174729 ],
        [-1.3813697 , -1.4051404 ,  1.8214988 ],
        [-0.8635123 , -1.3751632 , -0.14479056],
        [-2.250883  , -0.31247318, -0.58550864],
        [-1.71185   ,  1.3363655 , -0.07352316]]], dtype=float32)>, <tf.Tensor: id=10, shape=(1, 5, 3), dtype=float32, numpy=
array([[[ 2.473549  ,  0.78311926, -0.21324241],
        [-1.127054  , -0.26533926, -1.4848714 ],
        [ 1.0042518 , -0.4744549 , -1.0831544 ],
        [ 1.5913504 , -1.9893705 ,  0.916816  ],
        [-0.755542  ,  0.9349252 ,  0.21747041]]], dtype=float32)>, <tf.Tensor: id=11, shape=(1, 5, 3), dtype=float32, numpy=
array([[[-0.6105256 , -0.4558208 , -0.66300654],
        [-0.37005875,  1.0546304 ,  1.3110442 ],
        [ 0.30050093, -1.0673137 ,  1.048935  ],
        [-0.3111932 , -0.8018419 , -0.15355548],
        [ 1.2823166 , -0.03827742,  1.313077  ]]], dtype=float32)>]
tf.Tensor(
[[[-0.30191737 -0.0259735   0.7174729 ]
  [-1.3813697  -1.4051404   1.8214988 ]
  [-0.8635123  -1.3751632  -0.14479056]
  [-2.250883   -0.31247318 -0.58550864]
  [-1.71185     1.3363655  -0.07352316]]], shape=(1, 5, 3), dtype=float32)
[<tf.Tensor: id=14, shape=(1, 5, 3), dtype=float32, numpy=
array([[[ 0.3904892 , -1.105881  , -0.7082881 ],
        [ 0.4949667 ,  0.84043664, -0.68348324],
        [-0.22346756, -2.6907673 ,  0.17591892],
        [-0.7161378 ,  0.7036343 , -1.8668156 ],
        [ 0.36962324, -0.50048685, -0.24732415]]], dtype=float32)>, <tf.Tensor: id=15, shape=(3, 5, 3), dtype=float32, numpy=
array([[[-0.30191737, -0.0259735 ,  0.7174729 ],
        [-1.3813697 , -1.4051404 ,  1.8214988 ],
        [-0.8635123 , -1.3751632 , -0.14479056],
        [-2.250883  , -0.31247318, -0.58550864],
        [-1.71185   ,  1.3363655 , -0.07352316]],

       [[ 2.473549  ,  0.78311926, -0.21324241],
        [-1.127054  , -0.26533926, -1.4848714 ],
        [ 1.0042518 , -0.4744549 , -1.0831544 ],
        [ 1.5913504 , -1.9893705 ,  0.916816  ],
        [-0.755542  ,  0.9349252 ,  0.21747041]],

       [[-0.6105256 , -0.4558208 , -0.66300654],
        [-0.37005875,  1.0546304 ,  1.3110442 ],
        [ 0.30050093, -1.0673137 ,  1.048935  ],
        [-0.3111932 , -0.8018419 , -0.15355548],
        [ 1.2823166 , -0.03827742,  1.313077  ]]], dtype=float32)>]
tf.Tensor(
[[[-0.30191737 -0.0259735   0.7174729 ]
  [-1.3813697  -1.4051404   1.8214988 ]
  [-0.8635123  -1.3751632  -0.14479056]
  [-2.250883   -0.31247318 -0.58550864]
  [-1.71185     1.3363655  -0.07352316]]

 [[ 2.473549    0.78311926 -0.21324241]
  [-1.127054   -0.26533926 -1.4848714 ]
  [ 1.0042518  -0.4744549  -1.0831544 ]
  [ 1.5913504  -1.9893705   0.916816  ]
  [-0.755542    0.9349252   0.21747041]]

 [[-0.6105256  -0.4558208  -0.66300654]
  [-0.37005875  1.0546304   1.3110442 ]
  [ 0.30050093 -1.0673137   1.048935  ]
  [-0.3111932  -0.8018419  -0.15355548]
  [ 1.2823166  -0.03827742  1.313077  ]]], shape=(3, 5, 3), dtype=float32)
[<tf.Tensor: id=18, shape=(4, 2, 3), dtype=float32, numpy=
array([[[ 0.3904892 , -1.105881  , -0.7082881 ],
        [ 0.4949667 ,  0.84043664, -0.68348324]],

       [[-0.30191737, -0.0259735 ,  0.7174729 ],
        [-1.3813697 , -1.4051404 ,  1.8214988 ]],

       [[ 2.473549  ,  0.78311926, -0.21324241],
        [-1.127054  , -0.26533926, -1.4848714 ]],

       [[-0.6105256 , -0.4558208 , -0.66300654],
        [-0.37005875,  1.0546304 ,  1.3110442 ]]], dtype=float32)>, <tf.Tensor: id=19, shape=(4, 3, 3), dtype=float32, numpy=
array([[[-0.22346756, -2.6907673 ,  0.17591892],
        [-0.7161378 ,  0.7036343 , -1.8668156 ],
        [ 0.36962324, -0.50048685, -0.24732415]],

       [[-0.8635123 , -1.3751632 , -0.14479056],
        [-2.250883  , -0.31247318, -0.58550864],
        [-1.71185   ,  1.3363655 , -0.07352316]],

       [[ 1.0042518 , -0.4744549 , -1.0831544 ],
        [ 1.5913504 , -1.9893705 ,  0.916816  ],
        [-0.755542  ,  0.9349252 ,  0.21747041]],

       [[ 0.30050093, -1.0673137 ,  1.048935  ],
        [-0.3111932 , -0.8018419 , -0.15355548],
        [ 1.2823166 , -0.03827742,  1.313077  ]]], dtype=float32)>]
tf.Tensor(
[[[-0.22346756 -2.6907673   0.17591892]
  [-0.7161378   0.7036343  -1.8668156 ]
  [ 0.36962324 -0.50048685 -0.24732415]]

 [[-0.8635123  -1.3751632  -0.14479056]
  [-2.250883   -0.31247318 -0.58550864]
  [-1.71185     1.3363655  -0.07352316]]

 [[ 1.0042518  -0.4744549  -1.0831544 ]
  [ 1.5913504  -1.9893705   0.916816  ]
  [-0.755542    0.9349252   0.21747041]]

 [[ 0.30050093 -1.0673137   1.048935  ]
  [-0.3111932  -0.8018419  -0.15355548]
  [ 1.2823166  -0.03827742  1.313077  ]]], shape=(4, 3, 3), dtype=float32)
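tf.split returns a Python list, so the pieces can be indexed individually or merged back with tf.concat along the same axis. A minimal round-trip sketch:

import tensorflow as tf

a = tf.random.normal([4, 5, 3])

# unequal split along axis 1: the sizes must sum to 5
parts = tf.split(a, num_or_size_splits=[2, 3], axis=1)
print(parts[0].shape, parts[1].shape)   # (4, 2, 3) (4, 3, 3)

# concatenating the pieces along the same axis restores the original tensor
restored = tf.concat(parts, axis=1)
print(bool(tf.reduce_all(tf.equal(a, restored))))  # True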

Compare the resulting shape values with those of tf.split above: tf.unstack removes the split dimension.
import tensorflow as tf
a = tf.random.normal([4,5,3])
# unlike tf.split, the split axis is removed: 4 tensors of shape (5, 3)
result = tf.unstack(a,axis=0)
print(result)

out:
[<tf.Tensor: id=6, shape=(5, 3), dtype=float32, numpy=
array([[-0.63918597, -0.7819123 ,  0.79048645],
       [ 0.3064155 , -1.3274609 ,  0.9028482 ],
       [-0.00903583, -0.32057858, -2.7755013 ],
       [ 0.09132141,  0.08776158, -0.4441764 ],
       [-0.70847946,  2.461596  ,  0.84051126]], dtype=float32)>, <tf.Tensor: id=7, shape=(5, 3), dtype=float32, numpy=
array([[-2.252057  ,  1.1810895 , -0.7220576 ],
       [ 0.69581914, -0.7567    ,  0.8683886 ],
       [ 0.6389015 ,  0.8643613 , -1.9073122 ],
       [ 0.26840648, -0.00836617,  0.30218175],
       [-0.59653777,  0.5698503 ,  0.79760593]], dtype=float32)>, <tf.Tensor: id=8, shape=(5, 3), dtype=float32, numpy=
array([[ 0.5377473 ,  0.0543653 , -0.8140486 ],
       [-0.07407385,  1.0649898 , -0.6257663 ],
       [ 0.7626077 ,  1.325773  ,  0.32939437],
       [-0.53121316,  0.5452042 , -0.4838635 ],
       [-1.9551233 ,  0.16686492, -0.8523249 ]], dtype=float32)>, <tf.Tensor: id=9, shape=(5, 3), dtype=float32, numpy=
array([[-2.2233799 ,  0.68255967,  0.4995651 ],
       [ 0.86839   ,  1.4954503 ,  0.73637474],
       [-1.0587592 , -0.9949351 , -0.33885202],
       [ 0.483398  , -0.7528148 , -0.3674827 ],
       [ 0.29438716,  0.18951854,  1.0799344 ]], dtype=float32)>]
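Since tf.unstack removes the split axis, tf.stack along the same axis acts as its inverse. A minimal sketch (unstacking along axis 2 this time):

import tensorflow as tf

a = tf.random.normal([4, 5, 3])

# unstacking along axis 2 yields 3 tensors of shape (4, 5)
channels = tf.unstack(a, axis=2)
print(len(channels), channels[0].shape)  # 3 (4, 5)

# stacking them back along the same axis recovers the original tensor
restored = tf.stack(channels, axis=2)
print(bool(tf.reduce_all(tf.equal(a, restored))))  # True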

