# rtcdet_head.py
  1. import torch
  2. import torch.nn as nn
  3. try:
  4. from .rtcdet_basic import BasicConv
  5. except:
  6. from rtcdet_basic import BasicConv
  7. # Single-level Head
  8. class SingleLevelHead(nn.Module):
  9. def __init__(self,
  10. in_dim :int = 256,
  11. cls_head_dim :int = 256,
  12. reg_head_dim :int = 256,
  13. num_cls_head :int = 2,
  14. num_reg_head :int = 2,
  15. act_type :str = "silu",
  16. norm_type :str = "BN",
  17. depthwise :bool = False):
  18. super().__init__()
  19. # --------- Basic Parameters ----------
  20. self.in_dim = in_dim
  21. self.num_cls_head = num_cls_head
  22. self.num_reg_head = num_reg_head
  23. self.act_type = act_type
  24. self.norm_type = norm_type
  25. self.depthwise = depthwise
  26. # --------- Network Parameters ----------
  27. ## cls head
  28. cls_feats = []
  29. self.cls_head_dim = cls_head_dim
  30. for i in range(num_cls_head):
  31. if i == 0:
  32. cls_feats.append(
  33. BasicConv(in_dim, self.cls_head_dim, kernel_size=3, padding=1, stride=1,
  34. act_type=act_type, norm_type=norm_type, depthwise=depthwise)
  35. )
  36. else:
  37. cls_feats.append(
  38. BasicConv(self.cls_head_dim, self.cls_head_dim, kernel_size=3, padding=1, stride=1,
  39. act_type=act_type, norm_type=norm_type, depthwise=depthwise)
  40. )
  41. ## reg head
  42. reg_feats = []
  43. self.reg_head_dim = reg_head_dim
  44. for i in range(num_reg_head):
  45. if i == 0:
  46. reg_feats.append(
  47. BasicConv(in_dim, self.reg_head_dim, kernel_size=3, padding=1, stride=1,
  48. act_type=act_type, norm_type=norm_type, depthwise=depthwise)
  49. )
  50. else:
  51. reg_feats.append(
  52. BasicConv(self.reg_head_dim, self.reg_head_dim, kernel_size=3, padding=1, stride=1,
  53. act_type=act_type, norm_type=norm_type, depthwise=depthwise)
  54. )
  55. self.cls_feats = nn.Sequential(*cls_feats)
  56. self.reg_feats = nn.Sequential(*reg_feats)
  57. self.init_weights()
  58. def init_weights(self):
  59. """Initialize the parameters."""
  60. for m in self.modules():
  61. if isinstance(m, torch.nn.Conv2d):
  62. # In order to be consistent with the source code,
  63. # reset the Conv2d initialization parameters
  64. m.reset_parameters()
  65. def forward(self, x):
  66. """
  67. in_feats: (Tensor) [B, C, H, W]
  68. """
  69. cls_feats = self.cls_feats(x)
  70. reg_feats = self.reg_feats(x)
  71. return cls_feats, reg_feats
  72. # Multi-level Head
  73. class MultiLevelHead(nn.Module):
  74. def __init__(self, cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
  75. super().__init__()
  76. ## ----------- Network Parameters -----------
  77. self.multi_level_heads = nn.ModuleList(
  78. [SingleLevelHead(in_dim = in_dims[level],
  79. cls_head_dim = max(in_dims[0], min(num_classes, 100)),
  80. reg_head_dim = max(in_dims[0]//4, 16, 4*reg_max),
  81. num_cls_head = cfg['num_cls_head'],
  82. num_reg_head = cfg['num_reg_head'],
  83. act_type = cfg['head_act'],
  84. norm_type = cfg['head_norm'],
  85. depthwise = cfg['head_depthwise'])
  86. for level in range(num_levels)
  87. ])
  88. # --------- Basic Parameters ----------
  89. self.in_dims = in_dims
  90. self.cls_head_dim = self.multi_level_heads[0].cls_head_dim
  91. self.reg_head_dim = self.multi_level_heads[0].reg_head_dim
  92. def forward(self, feats):
  93. """
  94. feats: List[(Tensor)] [[B, C, H, W], ...]
  95. """
  96. cls_feats = []
  97. reg_feats = []
  98. for feat, head in zip(feats, self.multi_level_heads):
  99. # ---------------- Pred ----------------
  100. cls_feat, reg_feat = head(feat)
  101. cls_feats.append(cls_feat)
  102. reg_feats.append(reg_feat)
  103. return cls_feats, reg_feats
  104. # build detection head
  105. def build_head(cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
  106. if cfg['head'] == 'decoupled_head':
  107. head = MultiLevelHead(cfg, in_dims, num_levels, num_classes, reg_max)
  108. return head
  109. if __name__ == '__main__':
  110. import time
  111. from thop import profile
  112. cfg = {
  113. 'head': 'decoupled_head',
  114. 'num_cls_head': 2,
  115. 'num_reg_head': 2,
  116. 'head_act': 'silu',
  117. 'head_norm': 'BN',
  118. 'head_depthwise': False,
  119. 'reg_max': 16,
  120. }
  121. fpn_dims = [256, 256, 256]
  122. cls_out_dim = 256
  123. reg_out_dim = 256
  124. # Head-1
  125. model = build_head(cfg, fpn_dims, num_levels=3, num_classes=80, reg_max=16)
  126. print(model)
  127. fpn_feats = [torch.randn(1, fpn_dims[0], 80, 80), torch.randn(1, fpn_dims[1], 40, 40), torch.randn(1, fpn_dims[2], 20, 20)]
  128. t0 = time.time()
  129. outputs = model(fpn_feats)
  130. t1 = time.time()
  131. print('Time: ', t1 - t0)
  132. # for out in outputs:
  133. # print(out.shape)
  134. print('==============================')
  135. flops, params = profile(model, inputs=(fpn_feats, ), verbose=False)
  136. print('==============================')
  137. print('Head-1: GFLOPs : {:.2f}'.format(flops / 1e9 * 2))
  138. print('Head-1: Params : {:.2f} M'.format(params / 1e6))