# NOTE: The third version works; the first one did not apply sigmoid, so its loss came out strange.
import tensorflow as tf
def focal_loss(pred, y, alpha=0.25, gamma=2):
"""Compute focal loss for predictions.
Multi-labels Focal loss formula:
FL = -alpha * (z-p)^gamma * log(p) -(1-alpha) * p^gamma * log(1-p)
,which alpha = 0.25, gamma = 2, p = sigmoid(x), z = target_tensor.
Args:
pred: A float tensor of shape [batch_size, num_anchors,
num_classes] representing the predicted logits for each class
y: A float tensor of shape [batch_size, num_anchors,
num_classes] representing one-hot encoded classification targets
alpha: A scalar tensor for focal loss alpha hyper-parameter
gamma: A scalar tensor for focal loss gamma hyper-parameter
Returns:
loss: A (scalar) tensor representing the value of the loss function
"""
zeros = tf.zeros_like(pred, dtype=pred.dtype)
# For positive prediction, only need consider front part loss, back part is 0;
# target_tensor > zeros <=> z=1, so positive coefficient = z - p.
pos_p_sub = tf.where(y > zeros, y - pred