# Loss setup for the sequential angle-refinement heads.
# NOTE(review): softmax / criterion / alpha appear unused in this chunk but are
# presumably consumed further down the file -- kept as-is.
softmax = nn.Softmax()
criterion = nn.CrossEntropyLoss().cuda()
reg_criterion = nn.MSELoss().cuda()
smooth_l1_loss = nn.SmoothL1Loss().cuda()
# Regression loss coefficient
alpha = args.alpha

# One regression loss per refinement head: each head idx >= 1 predicts the
# residual between the ground truth and the first (coarse) head's prediction.
loss_seq = []
for idx in xrange(1, len(angles)):
    # Residual target: ground truth minus the coarse head's output mapped
    # through angle = x * 3 - 99 (assumed bin->degree conversion -- TODO
    # confirm against the dataset's binning scheme).
    label_angles_residuals = label_angles - (angles[0] * 3 - 99)
    # for idy in xrange(1, idx):
    #     label_angles_residuals += angles[idy] * 3 - 99
    # Detach so the regression target does not backpropagate into angles[0].
    label_angles_residuals = label_angles_residuals.detach()
    # Convert the residual back to the heads' native unit (inverse of
    # x * 3 - 99, applied element-wise: x / 3 + 33).
    label_angles_residuals = label_angles_residuals / 3.0 + 33
    # FIX: the original also computed an MSE loss against the *degree-space*
    # residual here and immediately overwrote it with the Smooth-L1 loss
    # below; that dead forward pass has been removed.
    loss_angles = smooth_l1_loss(angles[idx], label_angles_residuals)
    loss_seq.append(loss_angles)

# FIX: torch.Tensor(1) yields an UNINITIALIZED 1-element tensor; feeding it to
# backward() as a gradient weight is nondeterministic. Use an explicit unit
# weight instead, which is the evident intent.
grad_seq = [torch.ones(1).cuda(gpu) for _ in range(len(loss_seq))]