gelan_head.py

import torch
import torch.nn as nn

from .gelan_basic import BasicConv


# Single-level Head
class SingleLevelHead(nn.Module):
    def __init__(self,
                 in_dim       :int  = 256,
                 cls_head_dim :int  = 256,
                 reg_head_dim :int  = 256,
                 num_cls_head :int  = 2,
                 num_reg_head :int  = 2,
                 act_type     :str  = "silu",
                 norm_type    :str  = "BN",
                 depthwise    :bool = False):
        super().__init__()
        # --------- Basic Parameters ----------
        self.in_dim = in_dim
        self.num_cls_head = num_cls_head
        self.num_reg_head = num_reg_head
        self.act_type = act_type
        self.norm_type = norm_type
        self.depthwise = depthwise

        # --------- Network Parameters ----------
        ## cls head
        cls_feats = []
        self.cls_head_dim = cls_head_dim
        for i in range(num_cls_head):
            if i == 0:
                cls_feats.append(
                    BasicConv(in_dim, self.cls_head_dim,
                              kernel_size=3, padding=1, stride=1,
                              act_type=act_type,
                              norm_type=norm_type,
                              depthwise=depthwise)
                )
            else:
                cls_feats.append(
                    BasicConv(self.cls_head_dim, self.cls_head_dim,
                              kernel_size=3, padding=1, stride=1,
                              act_type=act_type,
                              norm_type=norm_type,
                              depthwise=depthwise)
                )

        ## reg head
        reg_feats = []
        self.reg_head_dim = reg_head_dim
        for i in range(num_reg_head):
            if i == 0:
                reg_feats.append(
                    BasicConv(in_dim, self.reg_head_dim,
                              kernel_size=3, padding=1, stride=1,
                              act_type=act_type,
                              norm_type=norm_type,
                              depthwise=depthwise)
                )
            else:
                reg_feats.append(
                    BasicConv(self.reg_head_dim, self.reg_head_dim,
                              kernel_size=3, padding=1, stride=1, group=4,
                              act_type=act_type,
                              norm_type=norm_type,
                              depthwise=depthwise)
                )

        self.cls_feats = nn.Sequential(*cls_feats)
        self.reg_feats = nn.Sequential(*reg_feats)

        self.init_weights()
    def init_weights(self):
        """Initialize the parameters."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # Reset Conv2d parameters so the initialization
                # matches the reference implementation.
                m.reset_parameters()
    def forward(self, x):
        """
        x: (Tensor) [B, C, H, W]
        """
        cls_feats = self.cls_feats(x)
        reg_feats = self.reg_feats(x)

        return cls_feats, reg_feats
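
# Usage sketch (hypothetical shapes, not part of the original file): a
# SingleLevelHead maps one feature map to a pair of decoupled branches.
#   head = SingleLevelHead(in_dim=256, cls_head_dim=256, reg_head_dim=64)
#   cls_feat, reg_feat = head(torch.randn(2, 256, 32, 32))
#   # cls_feat: [2, 256, 32, 32], reg_feat: [2, 64, 32, 32]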

# Multi-level Head
class GElanDetHead(nn.Module):
    def __init__(self, cfg, in_dims):
        super().__init__()
        ## ----------- Network Parameters -----------
        self.multi_level_heads = nn.ModuleList(
            [SingleLevelHead(in_dim       = in_dims[level],
                             cls_head_dim = max(in_dims[0], min(cfg.num_classes * 2, 128)),
                             reg_head_dim = max(in_dims[0] // 4, 16, 4 * cfg.reg_max),
                             num_cls_head = cfg.num_cls_head,
                             num_reg_head = cfg.num_reg_head,
                             act_type     = cfg.head_act,
                             norm_type    = cfg.head_norm,
                             depthwise    = cfg.head_depthwise)
             for level in range(cfg.num_levels)
             ])
        # --------- Basic Parameters ----------
        self.in_dims = in_dims
        self.cls_head_dim = self.multi_level_heads[0].cls_head_dim
        self.reg_head_dim = self.multi_level_heads[0].reg_head_dim

    def forward(self, feats):
        """
        feats: (List[Tensor]) [[B, C, H, W], ...]
        """
        cls_feats = []
        reg_feats = []
        for feat, head in zip(feats, self.multi_level_heads):
            # ---------------- Pred ----------------
            cls_feat, reg_feat = head(feat)
            cls_feats.append(cls_feat)
            reg_feats.append(reg_feat)

        return cls_feats, reg_feats
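
# Minimal smoke test: a sketch only, not part of the original file. It assumes
# the module is run inside its package (e.g. `python -m models.gelan_head`) so
# the relative import of BasicConv resolves, and that BasicConv accepts the
# keyword arguments used above. The cfg below is a hypothetical stand-in built
# from the fields the constructor reads, not the project's real config.
if __name__ == "__main__":
    from types import SimpleNamespace

    cfg = SimpleNamespace(num_classes    = 80,
                          reg_max        = 16,
                          num_cls_head   = 2,
                          num_reg_head   = 2,
                          head_act       = "silu",
                          head_norm      = "BN",
                          head_depthwise = False,
                          num_levels     = 3)
    in_dims = [256, 512, 512]
    head = GElanDetHead(cfg, in_dims)

    # Three pyramid levels (strides 8 / 16 / 32) on a 256x256 input.
    feats = [torch.randn(2, c, 256 // s, 256 // s)
             for c, s in zip(in_dims, [8, 16, 32])]
    cls_feats, reg_feats = head(feats)
    for cls_feat, reg_feat in zip(cls_feats, reg_feats):
        print(cls_feat.shape, reg_feat.shape)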