在看slim代码时看到这样的结构:
# Excerpt of the nested arg_scope pattern as it appears in the slim source.
# Re-indented here: the original paste had lost all indentation, and the
# innermost `with` had no body, so a `pass` placeholder keeps the fragment
# syntactically valid.
# NOTE(review): `weight_decay` is assumed to be defined by the enclosing
# function in the real source -- TODO confirm against the original file.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
                    activation_fn=tf.nn.relu,
                    biases_initializer=tf.constant_initializer(0.1),
                    weights_regularizer=slim.l2_regularizer(weight_decay)):
    # Inner scope: conv2d additionally defaults to padding='SAME'.
    with slim.arg_scope([slim.conv2d], padding='SAME'):
        # max_pool2d gets padding='VALID'; the accumulated defaults are
        # captured into `arg_sc`.
        with slim.arg_scope([slim.max_pool2d], padding='VALID') as arg_sc:
            # Re-entering the captured dict re-applies all defaults above.
            with slim.arg_scope(arg_sc):
                pass  # the original excerpt ends here
看懵了,改下代码,打印出来。一看,明白了。
# Experiment: print the dict that slim.arg_scope yields at each nesting level
# to see how the per-op default kwargs accumulate.
# Re-indented (the original paste had lost all indentation); the captured
# output transcripts below were bare string literals in the original -- they
# had no runtime effect, so they are recorded as comments here instead.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
                    activation_fn=tf.nn.relu,
                    biases_initializer=tf.constant_initializer(0.1),
                    weights_regularizer=slim.l2_regularizer(0.005)) as arg_sc0:
    # print(arg_sc0)
    # Observed output (object addresses differ between runs):
    # {'<function convolution2d at 0x...>':
    #     {'activation_fn': <function relu at 0x...>,
    #      'biases_initializer': <...init_ops.Constant object at 0x...>,
    #      'weights_regularizer': <function l2_regularizer.<locals>.l2 at 0x...>},
    #  '<function fully_connected at 0x...>':
    #     {'activation_fn': <function relu at 0x...>,
    #      'biases_initializer': <...init_ops.Constant object at 0x...>,
    #      'weights_regularizer': <function l2_regularizer.<locals>.l2 at 0x...>}}
    with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc1:
        # print(arg_sc1)
        # With the enclosing `with` above:
        # the convolution2d entry keeps the outer defaults (activation_fn,
        # biases_initializer, weights_regularizer) and additionally gains
        # 'padding': 'SAME'; the fully_connected entry is carried over
        # unchanged from the outer scope.
        # Without the enclosing `with`:
        # {'<function convolution2d at 0x...>': {'padding': 'SAME'}}
        with slim.arg_scope([slim.max_pool2d], padding='VALID') as arg_sc2:
            # print(arg_sc2)
            # With the enclosing `with`s above:
            # the convolution2d entry (outer defaults + 'padding': 'SAME')
            # and the fully_connected entry (outer defaults) are inherited,
            # plus a new entry:
            # '<function max_pool2d at 0x...>': {'padding': 'VALID'}
            # Without the enclosing `with`s:
            # {'<function max_pool2d at 0x...>': {'padding': 'VALID'}}
            with slim.arg_scope(arg_sc2) as arg_sc:
                print(arg_sc)
                # Re-entering the previously captured dict yields the same
                # accumulated mapping:
                # {'<function convolution2d at 0x...>':
                #     {'activation_fn': ..., 'biases_initializer': ...,
                #      'weights_regularizer': ..., 'padding': 'SAME'},
                #  '<function fully_connected at 0x...>':
                #     {'activation_fn': ..., 'biases_initializer': ...,
                #      'weights_regularizer': ...},
                #  '<function max_pool2d at 0x...>': {'padding': 'VALID'}}