# Demonstrates tf.variable_scope / tf.get_variable name-scoping and reuse
# semantics (TF1-style variable management).
# NOTE: on TensorFlow 2.x, `disable_v2_behavior`, `variable_scope` and
# `get_variable` only exist under the compat.v1 namespace, so the v1 API
# must be imported explicitly — `import tensorflow as tf` would fail at
# the `tf.disable_v2_behavior()` call.
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

# Create a variable named "v" inside the variable scope "foo".
with tf.variable_scope("foo"):
    v = tf.get_variable("v", [1], initializer=tf.constant_initializer(1.0))
'''
# Variable "v" already exists inside scope "foo", so creating it again
# raises an error:
with tf.variable_scope("foo"):
    v = tf.get_variable("v", [1])
# ValueError: Variable foo/v already exists, disallowed.
# Did you mean to set reuse=True in VarScope?
'''
# With reuse=True, tf.get_variable fetches the already-declared variable
# instead of creating a new one.
with tf.variable_scope("foo", reuse=True):
    v1 = tf.get_variable("v", [1])
    print(v == v1)  # True
'''
# When reuse=True, tf.get_variable can only fetch variables that were
# already created inside the given scope:
with tf.variable_scope("bar", reuse=True):
    v2 = tf.get_variable("v", [1])
# ValueError: Variable bar/v does not exist, or was not created with
# tf.get_variable(). Did you mean to set reuse=None in VarScope?
'''
with tf.variable_scope("root"):
    # tf.get_variable_scope().reuse reports the reuse flag of the
    # current (innermost) variable scope.
    print(tf.get_variable_scope().reuse)  # False
    with tf.variable_scope("foo1", reuse=True):
        print(tf.get_variable_scope().reuse)  # True
        with tf.variable_scope("bar1"):
            # "bar1" does not set reuse, so it inherits the outer
            # scope's value.
            print(tf.get_variable_scope().reuse)  # True
    print(tf.get_variable_scope().reuse)  # False

# tf.variable_scope also provides a way to manage variable namespaces:
# a variable's name is prefixed with the enclosing scope names.
u1 = tf.get_variable("u", [1])
print(u1.name)  # u:0
with tf.variable_scope("foou"):
    u2 = tf.get_variable("u", [1])
    print(u2.name)  # foou/u:0
with tf.variable_scope("foou"):
    with tf.variable_scope("baru"):
        u3 = tf.get_variable("u", [1])
        print(u3.name)  # foou/baru/u:0
    u4 = tf.get_variable("u1", [1])
    print(u4.name)  # foou/u1:0
# A variable can be fetched directly through its fully-qualified name
# (scope prefix included) from an empty reusing scope.
with tf.variable_scope("", reuse=True):
    u5 = tf.get_variable("foou/baru/u", [1])
    print(u5.name)  # foou/baru/u:0
    print(u5 == u3)  # True
    u6 = tf.get_variable("foou/u1", [1])
    print(u6.name)  # foou/u1:0
    print(u6 == u4)  # True
上面是源代码。第一次运行没有问题,但在同一会话(默认计算图未重置)中第二次运行时,由于图中已存在同名变量,会出现这样的错误:ValueError: Variable foo/v already exists, disallowed. Did you mean to set reuse=True in VarScope?
解决方法:在导入包之后加入以下代码,在运行前重置默认计算图:
tf.reset_default_graph()
加入该语句后再次运行即可得到正常结果。
如果这样仍不能解决,且出现 ValueError: Variable XX/XX does not exist, or was not created with tf.get_variable(). 这样的错误,可以把 reuse=True 改为 reuse=tf.AUTO_REUSE(不存在时创建、存在时复用),完整修正代码如下。
参考:ValueError: Variable XX/XX does not exist, or was not created with tf.get_variable()._普通网友的博客-CSDN博客
# Corrected version: rerunnable variable_scope demo.
# Fixes relative to the first version:
#   * tf.reset_default_graph() clears previously created variables, so
#     the script can be run repeatedly in the same interpreter session;
#   * reuse=tf.AUTO_REUSE creates the variable when it does not exist
#     and reuses it when it does, avoiding the "already exists" error.
# NOTE: on TensorFlow 2.x these APIs only exist under compat.v1, so the
# v1 namespace must be imported explicitly.
import tensorflow.compat.v1 as tf

tf.reset_default_graph()  # reset the default graph before building
tf.disable_v2_behavior()

# AUTO_REUSE: create "v" on the first run, fetch it on later runs.
with tf.variable_scope("foo", reuse=tf.AUTO_REUSE):
    v = tf.get_variable("v", [1], initializer=tf.constant_initializer(1.0))
with tf.variable_scope("foo", reuse=tf.AUTO_REUSE):
    v1 = tf.get_variable("v", [1])
    print(v == v1)  # True

with tf.variable_scope("root"):
    # The reuse flag of the current scope, and how nested scopes
    # inherit it when they do not set it themselves.
    print(tf.get_variable_scope().reuse)  # False
    with tf.variable_scope("foo1", reuse=tf.AUTO_REUSE):
        print(tf.get_variable_scope().reuse)  # _ReuseMode.AUTO_REUSE
        with tf.variable_scope("bar1"):
            # "bar1" inherits reuse from the enclosing "foo1" scope.
            print(tf.get_variable_scope().reuse)
    print(tf.get_variable_scope().reuse)  # False

# Variable names are prefixed by the enclosing scope names.
u1 = tf.get_variable("u", [1])
print(u1.name)  # u:0
with tf.variable_scope("foou"):
    u2 = tf.get_variable("u", [1])
    print(u2.name)  # foou/u:0
with tf.variable_scope("foou"):
    with tf.variable_scope("baru"):
        u3 = tf.get_variable("u", [1])
        print(u3.name)  # foou/baru/u:0
    u4 = tf.get_variable("u1", [1])
    print(u4.name)  # foou/u1:0
# Fetch variables by their fully-qualified names from an empty
# reusing scope.
with tf.variable_scope("", reuse=True):
    u5 = tf.get_variable("foou/baru/u", [1])
    print(u5.name)  # foou/baru/u:0
    print(u5 == u3)  # True
    u6 = tf.get_variable("foou/u1", [1])
    print(u6.name)  # foou/u1:0
    print(u6 == u4)  # True