# matcher.py — YOLOv5-style label assignment (anchor matching).
import numpy as np
import torch
  3. class Yolov5Matcher(object):
  4. def __init__(self, num_classes, num_anchors, anchor_size, anchor_theshold):
  5. self.num_classes = num_classes
  6. self.num_anchors = num_anchors
  7. self.anchor_theshold = anchor_theshold
  8. # [KA, 2]
  9. self.anchor_sizes = np.array([[anchor[0], anchor[1]]
  10. for anchor in anchor_size])
  11. # [KA, 4]
  12. self.anchor_boxes = np.array([[0., 0., anchor[0], anchor[1]]
  13. for anchor in anchor_size])
  14. def compute_iou(self, anchor_boxes, gt_box):
  15. """
  16. anchor_boxes : ndarray -> [KA, 4] (cx, cy, bw, bh).
  17. gt_box : ndarray -> [1, 4] (cx, cy, bw, bh).
  18. """
  19. # anchors: [KA, 4]
  20. anchors_xyxy = np.zeros_like(anchor_boxes)
  21. anchors_area = anchor_boxes[..., 2] * anchor_boxes[..., 3]
  22. # convert [cx, cy, bw, bh] -> [x1, y1, x2, y2]
  23. anchors_xyxy[..., :2] = anchor_boxes[..., :2] - anchor_boxes[..., 2:] * 0.5 # x1y1
  24. anchors_xyxy[..., 2:] = anchor_boxes[..., :2] + anchor_boxes[..., 2:] * 0.5 # x2y2
  25. # expand gt_box: [1, 4] -> [KA, 4]
  26. gt_box = np.array(gt_box).reshape(-1, 4)
  27. gt_box = np.repeat(gt_box, anchors_xyxy.shape[0], axis=0)
  28. gt_box_area = gt_box[..., 2] * gt_box[..., 3]
  29. # convert [cx, cy, bw, bh] -> [x1, y1, x2, y2]
  30. gt_box_xyxy = np.zeros_like(gt_box)
  31. gt_box_xyxy[..., :2] = gt_box[..., :2] - gt_box[..., 2:] * 0.5 # x1y1
  32. gt_box_xyxy[..., 2:] = gt_box[..., :2] + gt_box[..., 2:] * 0.5 # x2y2
  33. # intersection
  34. inter_w = np.minimum(anchors_xyxy[:, 2], gt_box_xyxy[:, 2]) - \
  35. np.maximum(anchors_xyxy[:, 0], gt_box_xyxy[:, 0])
  36. inter_h = np.minimum(anchors_xyxy[:, 3], gt_box_xyxy[:, 3]) - \
  37. np.maximum(anchors_xyxy[:, 1], gt_box_xyxy[:, 1])
  38. inter_area = inter_w * inter_h
  39. # union
  40. union_area = anchors_area + gt_box_area - inter_area
  41. # iou
  42. iou = inter_area / union_area
  43. iou = np.clip(iou, a_min=1e-10, a_max=1.0)
  44. return iou
  45. def iou_assignment(self, ctr_points, gt_box, fpn_strides):
  46. # compute IoU
  47. iou = self.compute_iou(self.anchor_boxes, gt_box)
  48. iou_mask = (iou > 0.5)
  49. label_assignment_results = []
  50. if iou_mask.sum() == 0:
  51. # We assign the anchor box with highest IoU score.
  52. iou_ind = np.argmax(iou)
  53. level = iou_ind // self.num_anchors # pyramid level
  54. anchor_idx = iou_ind - level * self.num_anchors # anchor index
  55. # get the corresponding stride
  56. stride = fpn_strides[level]
  57. # compute the grid cell
  58. xc, yc = ctr_points
  59. xc_s = xc / stride
  60. yc_s = yc / stride
  61. grid_x = int(xc_s)
  62. grid_y = int(yc_s)
  63. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  64. else:
  65. for iou_ind, iou_m in enumerate(iou_mask):
  66. if iou_m:
  67. level = iou_ind // self.num_anchors # pyramid level
  68. anchor_idx = iou_ind - level * self.num_anchors # anchor index
  69. # get the corresponding stride
  70. stride = fpn_strides[level]
  71. # compute the gride cell
  72. xc, yc = ctr_points
  73. xc_s = xc / stride
  74. yc_s = yc / stride
  75. grid_x = int(xc_s)
  76. grid_y = int(yc_s)
  77. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  78. return label_assignment_results
  79. def aspect_ratio_assignment(self, ctr_points, keeps, fpn_strides):
  80. label_assignment_results = []
  81. for keep_idx, keep in enumerate(keeps):
  82. if keep:
  83. level = keep_idx // self.num_anchors # pyramid level
  84. anchor_idx = keep_idx - level * self.num_anchors # anchor index
  85. # get the corresponding stride
  86. stride = fpn_strides[level]
  87. # compute the gride cell
  88. xc, yc = ctr_points
  89. xc_s = xc / stride
  90. yc_s = yc / stride
  91. grid_x = int(xc_s)
  92. grid_y = int(yc_s)
  93. label_assignment_results.append([grid_x, grid_y, xc_s, yc_s, level, anchor_idx])
  94. return label_assignment_results
  95. @torch.no_grad()
  96. def __call__(self, fmp_sizes, fpn_strides, targets):
  97. """
  98. fmp_size: (List) [fmp_h, fmp_w]
  99. fpn_strides: (List) -> [8, 16, 32, ...] stride of network output.
  100. targets: (Dict) dict{'boxes': [...],
  101. 'labels': [...],
  102. 'orig_size': ...}
  103. """
  104. assert len(fmp_sizes) == len(fpn_strides)
  105. # prepare
  106. bs = len(targets)
  107. gt_objectness = [
  108. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, 1])
  109. for (fmp_h, fmp_w) in fmp_sizes
  110. ]
  111. gt_classes = [
  112. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, self.num_classes])
  113. for (fmp_h, fmp_w) in fmp_sizes
  114. ]
  115. gt_bboxes = [
  116. torch.zeros([bs, fmp_h, fmp_w, self.num_anchors, 4])
  117. for (fmp_h, fmp_w) in fmp_sizes
  118. ]
  119. for batch_index in range(bs):
  120. targets_per_image = targets[batch_index]
  121. # [N,]
  122. tgt_cls = targets_per_image["labels"].numpy()
  123. # [N, 4]
  124. tgt_box = targets_per_image['boxes'].numpy()
  125. for gt_box, gt_label in zip(tgt_box, tgt_cls):
  126. # get a bbox coords
  127. x1, y1, x2, y2 = gt_box.tolist()
  128. # xyxy -> cxcywh
  129. xc, yc = (x2 + x1) * 0.5, (y2 + y1) * 0.5
  130. bw, bh = x2 - x1, y2 - y1
  131. gt_box = np.array([[0., 0., bw, bh]])
  132. # check target
  133. if bw < 1. or bh < 1.:
  134. # invalid target
  135. continue
  136. # compute aspect ratio
  137. ratios = gt_box[..., 2:] / self.anchor_sizes
  138. keeps = np.maximum(ratios, 1 / ratios).max(-1) < self.anchor_theshold
  139. if keeps.sum() == 0:
  140. label_assignment_results = self.iou_assignment([xc, yc], gt_box, fpn_strides)
  141. else:
  142. label_assignment_results = self.aspect_ratio_assignment([xc, yc], keeps, fpn_strides)
  143. # label assignment
  144. for result in label_assignment_results:
  145. # assignment
  146. grid_x, grid_y, xc_s, yc_s, level, anchor_idx = result
  147. stride = fpn_strides[level]
  148. fmp_h, fmp_w = fmp_sizes[level]
  149. # coord on the feature
  150. x1s, y1s = x1 / stride, y1 / stride
  151. x2s, y2s = x2 / stride, y2 / stride
  152. # offset
  153. off_x = xc_s - grid_x
  154. off_y = yc_s - grid_y
  155. if off_x <= 0.5 and off_y <= 0.5: # top left
  156. grids = [(grid_x-1, grid_y), (grid_x, grid_y-1), (grid_x, grid_y)]
  157. elif off_x > 0.5 and off_y <= 0.5: # top right
  158. grids = [(grid_x+1, grid_y), (grid_x, grid_y-1), (grid_x, grid_y)]
  159. elif off_x <= 0.5 and off_y > 0.5: # bottom left
  160. grids = [(grid_x-1, grid_y), (grid_x, grid_y+1), (grid_x, grid_y)]
  161. elif off_x > 0.5 and off_y > 0.5: # bottom right
  162. grids = [(grid_x+1, grid_y), (grid_x, grid_y+1), (grid_x, grid_y)]
  163. for (i, j) in grids:
  164. is_in_box = (j >= y1s and j < y2s) and (i >= x1s and i < x2s)
  165. is_valid = (j >= 0 and j < fmp_h) and (i >= 0 and i < fmp_w)
  166. if is_in_box and is_valid:
  167. # obj
  168. gt_objectness[level][batch_index, j, i, anchor_idx] = 1.0
  169. # cls
  170. cls_ont_hot = torch.zeros(self.num_classes)
  171. cls_ont_hot[int(gt_label)] = 1.0
  172. gt_classes[level][batch_index, j, i, anchor_idx] = cls_ont_hot
  173. # box
  174. gt_bboxes[level][batch_index, j, i, anchor_idx] = torch.as_tensor([x1, y1, x2, y2])
  175. # [B, M, C]
  176. gt_objectness = torch.cat([gt.view(bs, -1, 1) for gt in gt_objectness], dim=1).float()
  177. gt_classes = torch.cat([gt.view(bs, -1, self.num_classes) for gt in gt_classes], dim=1).float()
  178. gt_bboxes = torch.cat([gt.view(bs, -1, 4) for gt in gt_bboxes], dim=1).float()
  179. return gt_objectness, gt_classes, gt_bboxes