I have the custom loss function that I am trying to use on my model however when I use the loss.backward() in Pytorch is not working.
This is the loss function:
class Neg_Pearson(nn.Module): # Pearson range [-1, 1] so if < 0, abs|loss| ; if >0, 1- loss
def __init__(self):
super(Neg_Pearson,self).__init__()
return
def forward(self, preds, labels): # tensor [Batch, Temporal]
loss = 0
for i in range(preds.shape[0]):
sum_x = torch.sum(preds[i]) # x
sum_y = torch.sum(labels[i]) # y
sum_xy = torch.sum(preds[i]*labels[i]) # xy
sum_x2 = torch.sum(torch.pow(preds[i],2)) # x^2
sum_y2 = torch.sum(torch.pow(labels[i],2)) # y^2
N = preds.shape[1]
pearson = (N*sum_xy - sum_x*sum_y)/(torch.sqrt((N*sum_x2 - torch.pow(sum_x,2))*(N*sum_y2 - torch.pow(sum_y,2))))
loss += 1 - pearson
loss = loss.tolist()
loss = loss/preds.shape[0]
#print(loss)
return loss
When I try to use it with my model like so:
yp = (yp-torch.mean(yp)) /torch.std(yp) # normalize
yt = (yt-torch.mean(yt)) /torch.std(yt) # normalize
loss = neg_pears_loss(yp, yt)
print(loss)
optimizer.zero_grad()
loss.backward()
optimizer.step()
This is the error I am getting:
AttributeError: 'float' object has no attribute 'backward'
any suggestions on how do I fix this issue?