# matcher.py — YOLOv5-style anchor/label matcher.
  1. import numpy as np
  2. import torch
  3. class Yolov5Matcher(object):
  4. def __init__(self, num_classes, num_anchors, anchor_size, anchor_theshold):
  5. self.num_classes = num_classes
  6. self.num_anchors = num_anchors
  7. self.anchor_theshold = anchor_theshold
  8. # [KA, 2]
  9. self.anchor_sizes = np.array([[anchor[0], anchor[1]]
  10. for anchor in anchor_size])
  11. # [KA, 4]
  12. self.anchor_boxes = np.array([[0., 0., anchor[0], anchor[1]]
  13. for anchor in anchor_size])
  14. def compute_iou(self, anchor_boxes, gt_box):
  15. """
  16. anchor_boxes : ndarray -> [KA, 4] (cx, cy, bw, bh).
  17. gt_box : ndarray -> [1, 4] (cx, cy, bw, bh).
  18. """
  19. # anchors: [KA, 4]
  20. anchors = np.zeros_like(anchor_boxes)
  21. anchors[..., :2] = anchor_boxes[..., :2] - anchor_boxes[..., 2:] * 0.5 # x1y1
  22. anchors[..., 2:] = anchor_boxes[..., :2] + anchor_boxes[..., 2:] * 0.5 # x2y2
  23. anchors_area = anchor_boxes[..., 2] * anchor_boxes[..., 3]
  24. # gt_box: [1, 4] -> [KA, 4]
  25. gt_box = np.array(gt_box).reshape(-1, 4)
  26. gt_box = np.repeat(gt_box, anchors.shape[0], axis=0)
  27. gt_box_ = np.zeros_like(gt_box)
  28. gt_box_[..., :2] = gt_box[..., :2] - gt_box[..., 2:] * 0.5 # x1y1
  29. gt_box_[..., 2:] = gt_box[..., :2] + gt_box[..., 2:] * 0.5 # x2y2
  30. gt_box_area = np.prod(gt_box[..., 2:] - gt_box[..., :2], axis=1)
  31. # intersection
  32. inter_w = np.minimum(anchors[:, 2], gt_box_[:, 2]) - \
  33. np.maximum(anchors[:, 0], gt_box_[:, 0])
  34. inter_h = np.minimum(anchors[:, 3], gt_box_[:, 3]) - \
  35. np.maximum(anchors[:, 1], gt_box_[:, 1])
  36. inter_area = inter_w * inter_h
  37. # union
  38. union_area = anchors_area + gt_box_area - inter_area
  39. # iou
  40. iou = inter_area / union_area
  41. iou = np.clip(iou, a_min=1e-10, a_max=1.0)
  42. return iou
  43. def iou_assignment(self, ctr_points, gt_box, fpn_strides):
  44. # compute IoU
  45. iou = self.compute_iou(self.anchor_boxes, gt_box)
  46. iou_mask = (iou > 0.5)
  47. label_assignment_results = []
  48. if iou_mask.sum() == 0:
  49. # We assign the anchor box with highest IoU score.
  50. iou_ind = np.argmax(iou)
  51. level = iou_ind // self.num_anchors # pyramid level
  52. anchor_idx = iou_ind - level * self.num_anchors # anchor index
  53. # get the corresponding stride
  54. stride = fpn_strides[level]
  55. # compute the grid cell
  56. xc, yc = ctr_points
  57. xc_s = xc / stride
  58. yc_s = yc / stride
  59. grid_x = int(xc_s)
  60. grid_y = int(yc_s)
  61. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  62. else:
  63. for iou_ind, iou_m in enumerate(iou_mask):
  64. if iou_m:
  65. level = iou_ind // self.num_anchors # pyramid level
  66. anchor_idx = iou_ind - level * self.num_anchors # anchor index
  67. # get the corresponding stride
  68. stride = fpn_strides[level]
  69. # compute the gride cell
  70. xc_s = xc / stride
  71. yc_s = yc / stride
  72. grid_x = int(xc_s)
  73. grid_y = int(yc_s)
  74. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  75. return label_assignment_results
  76. def aspect_ratio_assignment(self, ctr_points, keeps, fpn_strides):
  77. label_assignment_results = []
  78. for keep_idx, keep in enumerate(keeps):
  79. if keep:
  80. level = keep_idx // self.num_anchors # pyramid level
  81. anchor_idx = keep_idx - level * self.num_anchors # anchor index
  82. # get the corresponding stride
  83. stride = fpn_strides[level]
  84. # compute the gride cell
  85. xc, yc = ctr_points
  86. xc_s = xc / stride
  87. yc_s = yc / stride
  88. grid_x = int(xc_s)
  89. grid_y = int(yc_s)
  90. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  91. return label_assignment_results
  92. @torch.no_grad()
  93. def __call__(self, fmp_sizes, fpn_strides, targets):
  94. """
  95. fmp_size: (List) [fmp_h, fmp_w]
  96. fpn_strides: (List) -> [8, 16, 32, ...] stride of network output.
  97. targets: (Dict) dict{'boxes': [...],
  98. 'labels': [...],
  99. 'orig_size': ...}
  100. """
  101. assert len(fmp_sizes) == len(fpn_strides)
  102. # prepare
  103. bs = len(targets)
  104. gt_objectness = [
  105. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, 1])
  106. for (fmp_h, fmp_w) in fmp_sizes
  107. ]
  108. gt_classes = [
  109. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, self.num_classes])
  110. for (fmp_h, fmp_w) in fmp_sizes
  111. ]
  112. gt_bboxes = [
  113. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, 4])
  114. for (fmp_h, fmp_w) in fmp_sizes
  115. ]
  116. for batch_index in range(bs):
  117. targets_per_image = targets[batch_index]
  118. # [N,]
  119. tgt_cls = targets_per_image["labels"].numpy()
  120. # [N, 4]
  121. tgt_box = targets_per_image['boxes'].numpy()
  122. for gt_box, gt_label in zip(tgt_box, tgt_cls):
  123. # get a bbox coords
  124. x1, y1, x2, y2 = gt_box.tolist()
  125. # xyxy -> cxcywh
  126. xc, yc = (x2 + x1) * 0.5, (y2 + y1) * 0.5
  127. bw, bh = x2 - x1, y2 - y1
  128. gt_box = np.array([[0., 0., bw, bh]])
  129. # check target
  130. if bw < 1. or bh < 1.:
  131. # invalid target
  132. continue
  133. # compute aspect ratio
  134. ratios = gt_box[..., 2:] / self.anchor_sizes
  135. keeps = np.maximum(ratios, 1 / ratios).max(-1) < self.anchor_theshold
  136. if keeps.sum() == 0:
  137. label_assignment_results = self.iou_assignment([xc, yc], gt_box, fpn_strides)
  138. else:
  139. label_assignment_results = self.aspect_ratio_assignment([xc, yc], keeps, fpn_strides)
  140. # label assignment
  141. for result in label_assignment_results:
  142. stride = fpn_strides[level]
  143. fmp_h, fmp_w = fmp_sizes[level]
  144. # assignment
  145. grid_x, grid_y, xc_s, yc_s, level, anchor_idx = result
  146. # coord on the feature
  147. x1s, y1s = x1 / stride, y1 / stride
  148. x2s, y2s = x2 / stride, y2 / stride
  149. # offset
  150. off_x = xc_s - grid_x
  151. off_y = yc_s - grid_y
  152. if off_x <= 0.5 and off_y <= 0.5: # top left
  153. grids = [(grid_x-1, grid_y), (grid_x, grid_y-1), (grid_x, grid_y)]
  154. elif off_x > 0.5 and off_y <= 0.5: # top right
  155. grids = [(grid_x+1, grid_y), (grid_x, grid_y-1), (grid_x, grid_y)]
  156. elif off_x < 0.5 and off_y > 0.5: # bottom left
  157. grids = [(grid_x-1, grid_y), (grid_x, grid_y+1), (grid_x, grid_y)]
  158. elif off_x > 0.5 and off_y > 0.5: # bottom right
  159. grids = [(grid_x+1, grid_y), (grid_x, grid_y+1), (grid_x, grid_y)]
  160. for (i, j) in grids:
  161. is_in_box = (j >= y1s and j < y2s) and (i >= x1s and i < x2s)
  162. is_valid = (j >= 0 and j < fmp_h) and (i >= 0 and i < fmp_w)
  163. if is_in_box and is_valid:
  164. # obj
  165. gt_objectness[level][batch_index, j, i, anchor_idx] = 1.0
  166. # cls
  167. cls_ont_hot = torch.zeros(self.num_classes)
  168. cls_ont_hot[int(gt_label)] = 1.0
  169. gt_classes[level][batch_index, j, i, anchor_idx] = cls_ont_hot
  170. # box
  171. gt_bboxes[level][batch_index, j, i, anchor_idx] = torch.as_tensor([x1, y1, x2, y2])
  172. # [B, M, C]
  173. gt_objectness = torch.cat([gt.view(bs, -1, 1) for gt in gt_objectness], dim=1).float()
  174. gt_classes = torch.cat([gt.view(bs, -1, self.num_classes) for gt in gt_classes], dim=1).float()
  175. gt_bboxes = torch.cat([gt.view(bs, -1, 4) for gt in gt_bboxes], dim=1).float()
  176. return gt_objectness, gt_classes, gt_bboxes