loss.py

import torch
import torch.nn.functional as F

from .matcher import Yolov2Matcher
from utils.box_ops import get_ious
from utils.distributed_utils import get_world_size, is_dist_avail_and_initialized


class SetCriterion(object):
    """YOLOv2 training criterion: objectness + classification + GIoU box regression."""
    def __init__(self, cfg):
        self.cfg = cfg
        self.num_classes = cfg.num_classes
        # loss weights
        self.loss_obj_weight = cfg.loss_obj
        self.loss_cls_weight = cfg.loss_cls
        self.loss_box_weight = cfg.loss_box
        # matcher: assigns anchors to ground-truth boxes
        self.matcher = Yolov2Matcher(cfg.iou_thresh, cfg.num_classes, cfg.anchor_sizes)

    def loss_objectness(self, pred_obj, gt_obj):
        # objectness loss
        loss_obj = F.binary_cross_entropy_with_logits(pred_obj, gt_obj, reduction='none')

        return loss_obj

    def loss_classes(self, pred_cls, gt_label):
        # classification loss
        loss_cls = F.binary_cross_entropy_with_logits(pred_cls, gt_label, reduction='none')

        return loss_cls

    def loss_bboxes(self, pred_box, gt_box):
        # regression loss: 1 - GIoU
        ious = get_ious(pred_box,
                        gt_box,
                        box_mode="xyxy",
                        iou_type='giou')
        loss_box = 1.0 - ious

        return loss_box

    def __call__(self, outputs, targets):
        device = outputs['pred_cls'][0].device
        stride = outputs['stride']
        fmp_size = outputs['fmp_size']
        # label assignment
        (
            gt_objectness,
            gt_classes,
            gt_bboxes,
        ) = self.matcher(fmp_size=fmp_size,
                         stride=stride,
                         targets=targets)

        # flatten predictions: [B, M, C] -> [BM, C]
        pred_obj = outputs['pred_obj'].view(-1)          # [B, M, 1] -> [BM,]
        pred_cls = outputs['pred_cls'].flatten(0, 1)     # [B, M, C] -> [BM, C]
        pred_box = outputs['pred_box'].flatten(0, 1)     # [B, M, 4] -> [BM, 4]

        # flatten targets to match the predictions
        gt_objectness = gt_objectness.view(-1).to(device).float()               # [BM,]
        gt_classes = gt_classes.view(-1, self.num_classes).to(device).float()   # [BM, C]
        gt_bboxes = gt_bboxes.view(-1, 4).to(device).float()                    # [BM, 4]

        # number of foreground anchors, averaged across GPUs under DDP
        pos_masks = (gt_objectness > 0)
        num_fgs = pos_masks.sum()

        if is_dist_avail_and_initialized():
            torch.distributed.all_reduce(num_fgs)
        num_fgs = (num_fgs / get_world_size()).clamp(1.0)

        # obj loss: over all predictions
        loss_obj = self.loss_objectness(pred_obj, gt_objectness)
        loss_obj = loss_obj.sum() / num_fgs

        # cls loss: over positive predictions only
        pred_cls_pos = pred_cls[pos_masks]
        gt_classes_pos = gt_classes[pos_masks]
        loss_cls = self.loss_classes(pred_cls_pos, gt_classes_pos)
        loss_cls = loss_cls.sum() / num_fgs

        # box loss: over positive predictions only
        pred_box_pos = pred_box[pos_masks]
        gt_bboxes_pos = gt_bboxes[pos_masks]
        loss_box = self.loss_bboxes(pred_box_pos, gt_bboxes_pos)
        loss_box = loss_box.sum() / num_fgs

        # total loss: weighted sum of the three terms
        losses = self.loss_obj_weight * loss_obj + \
                 self.loss_cls_weight * loss_cls + \
                 self.loss_box_weight * loss_box

        loss_dict = dict(
                loss_obj = loss_obj,
                loss_cls = loss_cls,
                loss_box = loss_box,
                losses = losses,
        )

        return loss_dict


if __name__ == "__main__":
    pass
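
For reference, below is a self-contained sketch of the same normalization scheme on dummy tensors: BCE-with-logits losses summed and divided by the number of foreground anchors, plus a 1 - GIoU box loss on positives only. The giou_1d helper and the toy shapes and assignment are illustrative stand-ins for get_ious and the matcher output, not code from the repository.

import torch
import torch.nn.functional as F

def giou_1d(box1, box2):
    # element-wise GIoU for [N, 4] boxes in xyxy format (stand-in for get_ious)
    x1 = torch.max(box1[:, 0], box2[:, 0])
    y1 = torch.max(box1[:, 1], box2[:, 1])
    x2 = torch.min(box1[:, 2], box2[:, 2])
    y2 = torch.min(box1[:, 3], box2[:, 3])
    inter = (x2 - x1).clamp(0) * (y2 - y1).clamp(0)
    area1 = (box1[:, 2] - box1[:, 0]) * (box1[:, 3] - box1[:, 1])
    area2 = (box2[:, 2] - box2[:, 0]) * (box2[:, 3] - box2[:, 1])
    union = area1 + area2 - inter
    iou = inter / union.clamp(min=1e-6)
    # smallest enclosing box
    cx1 = torch.min(box1[:, 0], box2[:, 0])
    cy1 = torch.min(box1[:, 1], box2[:, 1])
    cx2 = torch.max(box1[:, 2], box2[:, 2])
    cy2 = torch.max(box1[:, 3], box2[:, 3])
    enclose = (cx2 - cx1) * (cy2 - cy1)
    return iou - (enclose - union) / enclose.clamp(min=1e-6)

B, M, C = 2, 169, 20                      # toy batch size, anchors per image, classes
pred_obj = torch.randn(B * M)             # objectness logits, [BM,]
pred_cls = torch.randn(B * M, C)          # class logits, [BM, C]
xy = torch.rand(B * M, 2) * 50
wh = torch.rand(B * M, 2) * 50 + 1.0
pred_box = torch.cat([xy, xy + wh], dim=1)   # valid decoded boxes, xyxy

gt_obj = torch.zeros(B * M)
gt_obj[:2] = 1.0                          # pretend the matcher marked two positives
gt_cls = torch.zeros(B * M, C)
gt_cls[:2, 0] = 1.0                       # both positives assigned to class 0
gt_box = pred_box.clone()                 # identical boxes -> box loss should be ~0

pos = gt_obj > 0
num_fgs = pos.sum().clamp(1)              # single-process stand-in for the DDP average

loss_obj = F.binary_cross_entropy_with_logits(pred_obj, gt_obj, reduction='none').sum() / num_fgs
loss_cls = F.binary_cross_entropy_with_logits(pred_cls[pos], gt_cls[pos], reduction='none').sum() / num_fgs
loss_box = (1.0 - giou_1d(pred_box[pos], gt_box[pos])).sum() / num_fgs
print(loss_obj.item(), loss_cls.item(), loss_box.item())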