# yolov8_head.py

import torch
import torch.nn as nn

try:
    from .yolov8_basic import Conv
except ImportError:
    from yolov8_basic import Conv


# Single-level Head
class SingleLevelHead(nn.Module):
    def __init__(self, in_dim, cls_head_dim, reg_head_dim, num_cls_head, num_reg_head, act_type, norm_type, depthwise):
        super().__init__()
        # --------- Basic Parameters ----------
        self.in_dim = in_dim
        self.num_cls_head = num_cls_head
        self.num_reg_head = num_reg_head
        self.act_type = act_type
        self.norm_type = norm_type
        self.depthwise = depthwise

        # --------- Network Parameters ----------
        ## cls head
        cls_feats = []
        self.cls_head_dim = cls_head_dim
        for i in range(num_cls_head):
            if i == 0:
                cls_feats.append(
                    Conv(in_dim, self.cls_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )
            else:
                cls_feats.append(
                    Conv(self.cls_head_dim, self.cls_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )
        ## reg head
        reg_feats = []
        self.reg_head_dim = reg_head_dim
        for i in range(num_reg_head):
            if i == 0:
                reg_feats.append(
                    Conv(in_dim, self.reg_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )
            else:
                reg_feats.append(
                    Conv(self.reg_head_dim, self.reg_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )

        self.cls_feats = nn.Sequential(*cls_feats)
        self.reg_feats = nn.Sequential(*reg_feats)

    def forward(self, x):
        """
        x: (Tensor) [B, C, H, W]
        """
        cls_feats = self.cls_feats(x)
        reg_feats = self.reg_feats(x)

        return cls_feats, reg_feats
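
# A minimal sanity-check sketch of SingleLevelHead used in isolation. The dims
# below are illustrative assumptions, not values fixed elsewhere in this file;
# with k=3, p=1, s=1 convs the spatial size is preserved:
#
#   head = SingleLevelHead(in_dim=256, cls_head_dim=256, reg_head_dim=64,
#                          num_cls_head=2, num_reg_head=2,
#                          act_type='silu', norm_type='BN', depthwise=False)
#   cls_feat, reg_feat = head(torch.randn(1, 256, 80, 80))
#   # cls_feat: [1, 256, 80, 80], reg_feat: [1, 64, 80, 80]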


# Multi-level Head
class MultiLevelHead(nn.Module):
    def __init__(self, cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
        super().__init__()
        ## ----------- Network Parameters -----------
        self.multi_level_heads = nn.ModuleList(
            [SingleLevelHead(in_dims[level],
                             max(in_dims[0], min(num_classes, 100)),  # cls head out_dim
                             max(in_dims[0]//4, 16, 4*reg_max),       # reg head out_dim
                             cfg['num_cls_head'],
                             cfg['num_reg_head'],
                             cfg['head_act'],
                             cfg['head_norm'],
                             cfg['head_depthwise'])
             for level in range(num_levels)
             ])
        # --------- Basic Parameters ----------
        self.in_dims = in_dims
        self.cls_head_dim = self.multi_level_heads[0].cls_head_dim
        self.reg_head_dim = self.multi_level_heads[0].reg_head_dim

    def forward(self, feats):
        """
        feats: List[(Tensor)] [[B, C, H, W], ...]
        """
        cls_feats = []
        reg_feats = []
        for feat, head in zip(feats, self.multi_level_heads):
            # ---------------- Pred ----------------
            cls_feat, reg_feat = head(feat)
            cls_feats.append(cls_feat)
            reg_feats.append(reg_feat)

        return cls_feats, reg_feats
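
# NOTE: this module only produces intermediate per-level cls/reg feature maps.
# In a full YOLOv8-style detector they are typically consumed by 1x1 prediction
# convs (class logits and a 4*reg_max DFL box distribution), which are not part
# of this file. A hypothetical sketch, assuming one shared pair of pred convs
# and the `head`, `num_classes`, `reg_max` names from above:
#
#   cls_pred = nn.Conv2d(head.cls_head_dim, num_classes, kernel_size=1)
#   reg_pred = nn.Conv2d(head.reg_head_dim, 4 * reg_max, kernel_size=1)
#   cls_logits = [cls_pred(f) for f in cls_feats]   # [B, num_classes, H, W] per level
#   box_dists  = [reg_pred(f) for f in reg_feats]   # [B, 4*reg_max, H, W] per level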


# build detection head
def build_det_head(cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
    if cfg['head'] == 'decoupled_head':
        head = MultiLevelHead(cfg, in_dims, num_levels, num_classes, reg_max)
    else:
        raise NotImplementedError('Unknown head type: {}'.format(cfg['head']))

    return head


if __name__ == '__main__':
    import time
    from thop import profile

    cfg = {
        'head': 'decoupled_head',
        'num_cls_head': 2,
        'num_reg_head': 2,
        'head_act': 'silu',
        'head_norm': 'BN',
        'head_depthwise': False,
        'reg_max': 16,
    }
    fpn_dims = [256, 512, 512]
    # Expected head dims for these fpn_dims:
    # cls = max(256, min(80, 100)) = 256, reg = max(256 // 4, 16, 4 * 16) = 64
    cls_out_dim = 256
    reg_out_dim = 64

    # Head-1
    model = build_det_head(cfg, fpn_dims, num_levels=3, num_classes=80, reg_max=16)
    print(model)

    fpn_feats = [torch.randn(1, fpn_dims[0], 80, 80),
                 torch.randn(1, fpn_dims[1], 40, 40),
                 torch.randn(1, fpn_dims[2], 20, 20)]
    t0 = time.time()
    outputs = model(fpn_feats)
    t1 = time.time()
    print('Time: ', t1 - t0)
    # cls_feats, reg_feats = outputs
    # for cls_feat, reg_feat in zip(cls_feats, reg_feats):
    #     print(cls_feat.shape, reg_feat.shape)
    print('==============================')
    flops, params = profile(model, inputs=(fpn_feats, ), verbose=False)
    print('==============================')
    # thop reports multiply-accumulates, so x2 to report FLOPs
    print('Head-1: GFLOPs : {:.2f}'.format(flops / 1e9 * 2))
    print('Head-1: Params : {:.2f} M'.format(params / 1e6))