Open Kunlei-Hong opened 1 year ago
class FocalLoss(nn.Module): def init(self, gamma=2, alpha=1.0, size_average=True): super(FocalLoss, self).init() self.gamma = gamma self.alpha = alpha self.size_average = size_average self.elipson = 1e-6
def forward(self, logits, labels,num_classes): label_onehot = F.one_hot(labels,num_classes=num_classes) log_p = F.log_softmax(logits,dim=-1) ce_loss = (log_p * label_onehot).sum(1) + self.elipson p = F.softmax(logits, dim=1) pt = (label_onehot * p).sum(1) + self.elipson sub_pt = 1 - pt l = -self.alpha * (sub_pt**self.gamma) * ce_loss if self.size_average: return l.mean() else: return l.sum()
++++++++++++++++++++++++ It seems like it should look like this.
Thanks, I'll test it.
class FocalLoss(nn.Module):
    """Focal-loss module (constructor-only duplicate paste): stores hyperparameters.

    NOTE(review): this copy carries only the constructor; the issue's earlier
    paste contains the matching ``forward`` implementation.
    """

    def __init__(self, gamma=2, alpha=1.0, size_average=True):
        # FIX: pasted code had `init` / `self.init()` (markdown stripped the
        # dunder underscores); without `__init__` the constructor never ran
        # and nn.Module was never initialized.
        super(FocalLoss, self).__init__()
        self.gamma = gamma                # focusing parameter >= 0
        self.alpha = alpha                # global scaling weight
        self.size_average = size_average  # True -> mean reduction, False -> sum
        self.elipson = 1e-6               # numerical-stability epsilon
++++++++++++++++++++++++ It seems like it should look like this.