Activation Functions
tf.sigmoid(x,name=None) # y = 1 / (1 + exp(-x)) ✨
tf.tanh(x,name=None) # 双曲正切激活函数 (hyperbolic tangent)
tf.nn.relu(features, name=None) # 整流函数:max(features, 0) ✨
tf.nn.relu6(features,name=None) # 以6为阈值的整流函数: min(max(features, 0), 6)
tf.nn.elu(features, name=None) # elu函数, exp(features) - 1 if < 0, features otherwise
tf.nn.softplus(features, name=None) # 计算softplus: log(exp(features) + 1)
tf.nn.dropout(x,keep_prob,noise_shape=None,seed=None,name=None) # 计算dropout,keep_prob为keep概率, noise_shape为噪声的shape ✨
bias_add(value,bias,data_format=None,name=None) # 对value加一偏置量,此函数为tf.add的特殊情况,bias仅为一维,函数通过广播机制进行与value求和,数据格式可以与value不同,返回为与value相同格式
Convolution
tf.nn.convolution(input, filter, padding, strides=None, dilation_rate=None, name=None, data_format=None)
tf.nn.conv1d(input, filter, strides, padding, use_cudnn_on_gpu=None, data_format=None, name=None)
tf.nn.conv2d(input, filter, strides, padding, use_cudnn_on_gpu=None, data_format=None, name=None)
tf.nn.conv3d(input, filter, strides, padding, use_cudnn_on_gpu=None, data_format=None, name=None)
conv2d_transpose(value, filter, output_shape, strides, padding='SAME', data_format='NHWC', name=None)
Pooling
tf.nn.avg_pool(value, ksize, strides, padding, data_format='NHWC', name=None)
tf.nn.max_pool(value, ksize, strides, padding, data_format='NHWC', name=None)
tf.nn.max_pool_with_argmax(input, ksize, strides, padding, Targmax=None, name=None)
tf.nn.avg_pool3d(input, ksize, strides, padding, name=None)
tf.nn.max_pool3d(input, ksize, strides, padding, name=None)
Batch Normalization
tf.nn.l2_normalize(x, dim, epsilon=1e-12, name=None) # L2范式标准化
tf.nn.batch_normalization(x, mean, variance, offset, scale, variance_epsilon, name=None)
Classification
tf.nn.sigmoid_cross_entropy_with_logits(logits, targets, name=None)
tf.nn.softmax(logits, name=None)
tf.nn.softmax_cross_entropy_with_logits(logits, labels, name=None)
Loss
l2_loss(t,name=None)
Embeddings
embedding_lookup(params, ids, partition_strategy='mod', name=None, validate_indices=True, max_norm=None)
embedding_lookup_sparse(params, sp_ids, sp_weights, partition_strategy='mod', name=None, combiner=None, max_norm=None)
Recurrent Neural Networks
dynamic_rnn(cell, inputs, sequence_length=None, initial_state=None, dtype=None, parallel_iterations=None, swap_memory=False, time_major=False, scope=None)
bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, sequence_length=None, initial_state_fw=None, initial_state_bw=None, dtype=None, parallel_iterations=None, swap_memory=False, time_major=False, scope=None)
raw_rnn(cell, loop_fn, parallel_iterations=None, swap_memory=False, scope=None)