tensorflow中tf.get_variable的API为
def get_variable(name,
shape=None,
dtype=None,
initializer=None,
regularizer=None,
trainable=True,
collections=None,
caching_device=None,
partitioner=None,
validate_shape=True,
use_resource=None,
custom_getter=None):
常用的参数有:
name:变量名称
shape:变量维度
initializer:变量初始化方式
regularizer:正则化(regularization)
caching_device:可选的设备字符串或函数描述
其中,变量的初始化方式有
tf.constant_initializer–常量初始化
#!/usr/bin/python
# coding:utf-8
import tensorflow as tf
# 默认值为0
v1 = tf.get_variable('v1', shape=[5], initializer=tf.constant_initializer())
# 也可以指定初始化值
v2 = tf.get_variable('v2', shape=[5], initializer=tf.constant_initializer(9.))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print v1.eval()
print v2.eval()
输出:
[ 0. 0. 0. 0. 0.]
[ 9. 9. 9. 9. 9.]
tf.random_normal_initializer–正态分布初始化
#!/usr/bin/python
# coding:utf-8
import tensorflow as tf
# 随机正太分布初始化
v1 = tf.get_variable('v1', shape=[5], initializer=tf.random_normal_initializer())
# 指定正太分布的均值方差种子值
v2 = tf.get_variable('v2', shape=[5], initializer=tf.random_normal_initializer(mean=0, stddev=1., seed=1))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print v1.eval()
print v2.eval()
输出:
[-0.96925646 -1.34411633 1.27495158 0.10656819 -0.01502592]
[-0.81131822 1.48459876 0.06532937 -2.4427042 0.0992484 ]
tf.truncated_normal_initializer–截断正态分布初始化
#!/usr/bin/python
# coding:utf-8
import tensorflow as tf
# 随机截取正太分布
v1 = tf.get_variable('v1', shape=[5], initializer=tf.truncated_normal_initializer())
# 指定截取正太分布的均值方差种子值
v2 = tf.get_variable('v2', shape=[5], initializer=tf.truncated_normal_initializer(mean=0, stddev=1., seed=1))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print v1.eval()
print v2.eval()
输出:
[ 0.33786374 -0.9326936 -0.27475813 0.07207271 1.80768931]
[-0.81131822 1.48459876 0.06532937 0.0992484 0.63969707]
tf.random_uniform_initializer–均匀分布
#!/usr/bin/python
# coding:utf-8
import tensorflow as tf
# 均匀分布
v1 = tf.get_variable('v1', shape=[5], initializer=tf.random_uniform_initializer())
# 指定均匀分布的上下限和种子值
v2 = tf.get_variable('v2', shape=[5], initializer=tf.random_uniform_initializer(maxval=-1., minval=1., seed=0))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print v1.eval()
print v2.eval()
输出:
[ 0.62208343 0.81940019 0.67432535 0.07955837 0.59436822]
[ 0.79827476 -0.9403336 -0.69752836 0.90343738 0.90295386]
tf.zeros_initializer–全0初始化
tf.ones_initializer–全1初始化
#!/usr/bin/python
# coding:utf-8
import tensorflow as tf
# 全0
v1 = tf.get_variable('v1', shape=[5], initializer=tf.zeros_initializer())
# 全1
v2 = tf.get_variable('v2', shape=[5], initializer=tf.ones_initializer())
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print v1.eval()
print v2.eval()
输出:
[ 0. 0. 0. 0. 0.]
[ 1. 1. 1. 1. 1.]