# Build the label-conditioned encoder q(z | x, y): the one-hot labels are
# broadcast over the spatial dims and concatenated onto the image as extra
# channels, then two conv stacks produce the K-dim mu and sigma heads.
#
# x_u_in: [batch, 28, 28, 1]
shape = x_u_in.get_shape().as_list()
# y_u has shape [batch, class_num]; tile it over the spatial dims so it can
# be concatenated channel-wise. The leading multiple is N (not 1) so the
# result is [batch*N, 28, 28, class_num] — the original tiled with 1, which
# left the batch dim at `batch` and made the concat below fail for N > 1.
# Repeating the whole batch N times matches the x_u_in tiling pattern below.
y_tiled_u = tf.tile(y_u[:, None, None, :], [N, shape[1], shape[2], 1])
# Replicate each image N times along the batch dim: [batch*N, 28, 28, 1].
x_u_in_tiled = tf.tile(x_u_in, [N, 1, 1, 1], name='x_u_in_tiled')
# Concatenate x and y along the channel axis — effectively increases the
# number of input channels to 1 + class_num.
x_and_y_u = tf.concat((x_u_in_tiled, y_tiled_u), axis=3, name='x_and_y_u')
# mu branch: three 5x5 conv layers, then a linear projection to K dims.
net = slim.conv2d(x_and_y_u, 32, [5, 5], scope='conv_zxy_mu_1')
net = slim.conv2d(net, 64, [5, 5], scope='conv_zxy_mu_2')
net = slim.conv2d(net, 64, [5, 5], scope='conv_zxy_mu_3')
mu_u = slim.fully_connected(slim.flatten(net), K, activation_fn=None,
                            scope='fc_zxy_mu')
# sigma branch: same topology, separate weights; output is [batch*N, K].
net = slim.conv2d(x_and_y_u, 32, [5, 5], scope='conv_zxy_sigma_1')
net = slim.conv2d(net, 64, [5, 5], scope='conv_zxy_sigma_2')
net = slim.conv2d(net, 64, [5, 5], scope='conv_zxy_sigma_3')
sigma_u = slim.fully_connected(slim.flatten(net), K, activation_fn=None,
                               scope='fc_zxy_sigma')
# The label y is concatenated onto the convolution input as extra channels.
# (Source note: adapted from a blog post last updated 2020-03-27 22:36:06.)