losses.py
import torch
import torch.nn as nn
import torch.nn.functional as F

try:
    from LovaszSoftmax.pytorch.lovasz_losses import lovasz_hinge
except ImportError:
    # lovasz_hinge is optional; LovaszHingeLoss raises a NameError if used without it.
    pass

__all__ = ['BCEDiceLoss', 'LovaszHingeLoss', 'TverskyLoss']


class BCEDiceLoss(nn.Module):
    """Weighted sum of binary cross-entropy and soft Dice loss; expects raw logits."""

    def __init__(self, weight=None):
        # `weight` is accepted but currently unused.
        super().__init__()

    def forward(self, input, target):
        bce = F.binary_cross_entropy_with_logits(input, target)

        smooth = 1e-5
        input = torch.sigmoid(input)
        num = target.size(0)
        input = input.view(num, -1)
        target = target.view(num, -1)
        intersection = input * target
        # Soft Dice per sample: (2*intersection + smooth) / (|input| + |target| + smooth)
        dice = (2. * intersection.sum(1) + smooth) / (input.sum(1) + target.sum(1) + smooth)
        dice = 1 - dice.sum() / num
        return 0.5 * bce + dice


class LovaszHingeLoss(nn.Module):
    """Lovasz hinge loss for binary segmentation (requires the LovaszSoftmax package)."""

    def __init__(self):
        super().__init__()

    def forward(self, input, target):
        # lovasz_hinge expects (N, H, W) logits and labels, so drop the channel dimension.
        input = input.squeeze(1)
        target = target.squeeze(1)
        loss = lovasz_hinge(input, target, per_image=True)
        return loss


# Tversky index weights: ALPHA scales false positives, BETA scales false negatives.
ALPHA = 0.7
BETA = 0.3


class TverskyLoss(nn.Module):
    def __init__(self, weight=None, size_average=True):
        # `weight` and `size_average` are accepted but currently unused.
        super().__init__()

    def forward(self, inputs, targets, smooth=1, alpha=ALPHA, beta=BETA):
        # Comment out the sigmoid if the model already ends in a sigmoid or equivalent activation.
        inputs = torch.sigmoid(inputs)

        # Flatten label and prediction tensors.
        inputs = inputs.view(-1)
        targets = targets.view(-1)

        # True positives, false positives and false negatives.
        TP = (inputs * targets).sum()
        FP = ((1 - targets) * inputs).sum()
        FN = (targets * (1 - inputs)).sum()

        # Tversky index: (TP + smooth) / (TP + alpha*FP + beta*FN + smooth)
        Tversky = (TP + smooth) / (TP + alpha * FP + beta * FN + smooth)
        return 1 - Tversky
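

# Illustrative usage sketch (an assumption, not part of the original file): a quick
# sanity check that runs each loss on random logits and binary masks of shape
# (N, 1, H, W). LovaszHingeLoss is only exercised when the optional LovaszSoftmax
# package imported above is available.
if __name__ == '__main__':
    logits = torch.randn(2, 1, 32, 32)
    masks = torch.randint(0, 2, (2, 1, 32, 32)).float()

    print('BCEDiceLoss:', BCEDiceLoss()(logits, masks).item())
    print('TverskyLoss:', TverskyLoss()(logits, masks).item())
    try:
        print('LovaszHingeLoss:', LovaszHingeLoss()(logits, masks).item())
    except NameError:
        print('LovaszHingeLoss skipped: LovaszSoftmax is not installed')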