# Source: https://blog.csdn.net/wangjuncode/article/details/94209799
# For the following ordinary network:
import mxnet as mx
import mxnet.ndarray as nd
def mxnet_symbol_demo():
    """Build a small LeNet-style CNN as an MXNet symbolic graph.

    The network is: two conv -> ReLU -> max-pool stages, a flatten,
    a 256-unit fully connected layer with ReLU, and a 10-class
    fully connected layer with softmax output.

    Returns:
        mx.sym.Symbol: the ``SoftmaxOutput`` node of the graph; all
        internal nodes are reachable via ``get_internals()``.
    """
    data = mx.sym.Variable('data')
    # layer 1: 5x5 conv, 32 filters -> ReLU -> 2x2 max pool (stride 2)
    conv1 = mx.sym.Convolution(data=data, kernel=(5, 5), num_filter=32, name="conv1")
    relu1 = mx.sym.Activation(data=conv1, act_type="relu", name="relu1")
    pool1 = mx.sym.Pooling(data=relu1, pool_type="max", kernel=(2, 2), stride=(2, 2), name="pool1")
    # layer 2: 3x3 conv, 64 filters -> ReLU -> 2x2 max pool (stride 2)
    conv2 = mx.sym.Convolution(data=pool1, kernel=(3, 3), num_filter=64, name="conv2")
    relu2 = mx.sym.Activation(data=conv2, act_type="relu", name="relu2")
    pool2 = mx.sym.Pooling(data=relu2, pool_type="max", kernel=(2, 2), stride=(2, 2), name="pool2")
    # layer 3: flatten the feature maps, then a 256-unit FC layer with ReLU
    fc1 = mx.symbol.FullyConnected(data=mx.sym.flatten(pool2), num_hidden=256, name="fc1")
    relu3 = mx.sym.Activation(data=fc1, act_type="relu", name="relu3")
    # layer 4: 10-class FC layer followed by softmax loss/output
    fc2 = mx.symbol.FullyConnected(data=relu3, num_hidden=10, name="fc2")
    out = mx.sym.SoftmaxOutput(data=fc2, label=mx.sym.Variable("label"), name='softmax')
    return out
if __name__ == "__main__":
    sym = mxnet_symbol_demo()
    # get_internals() exposes every node of the graph (inputs, weights,
    # biases, and each layer's output); list their names for inspection.
    out_list = sym.get_internals().list_outputs()
    print(out_list)
# result:
# ['data', 'conv1_weight', 'conv1_bias', 'conv1_output',
# 'relu1_output', 'pool1_output', 'conv2_weight', 'conv2_bias',
# 'conv2_output', 'relu2_output', 'pool2_output', 'flatten0_output',
# 'fc1_weight', 'fc1_bias', 'fc1_output', 'relu3_output', 'fc2_weight',
# 'fc2_bias', 'fc2_output', 'label', 'softmax_output']
# ----------------------------------------------------------------------
# Copyright notice: this article was originally written by CSDN blogger
# "SeekingWJ" under the CC 4.0 BY-SA license; when reposting, include the
# original source link and this notice.
# Original link: https://blog.csdn.net/wangjuncode/article/details/94209799