yolov8_head.py

import torch
import torch.nn as nn

from .yolov8_basic import Conv


# Single-level Head
class SingleLevelHead(nn.Module):
    def __init__(self,
                 in_dim       :int  = 256,
                 cls_head_dim :int  = 256,
                 reg_head_dim :int  = 256,
                 num_cls_head :int  = 2,
                 num_reg_head :int  = 2,
                 act_type     :str  = "silu",
                 norm_type    :str  = "BN",
                 depthwise    :bool = False):
        super().__init__()
        # --------- Basic Parameters ----------
        self.in_dim = in_dim
        self.num_cls_head = num_cls_head
        self.num_reg_head = num_reg_head
        self.act_type = act_type
        self.norm_type = norm_type
        self.depthwise = depthwise

        # --------- Network Parameters ----------
        ## cls head
        cls_feats = []
        self.cls_head_dim = cls_head_dim
        for i in range(num_cls_head):
            if i == 0:
                cls_feats.append(
                    Conv(in_dim, self.cls_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )
            else:
                cls_feats.append(
                    Conv(self.cls_head_dim, self.cls_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )

        ## reg head
        reg_feats = []
        self.reg_head_dim = reg_head_dim
        for i in range(num_reg_head):
            if i == 0:
                reg_feats.append(
                    Conv(in_dim, self.reg_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )
            else:
                reg_feats.append(
                    Conv(self.reg_head_dim, self.reg_head_dim, k=3, p=1, s=1,
                         act_type=act_type,
                         norm_type=norm_type,
                         depthwise=depthwise)
                )

        self.cls_feats = nn.Sequential(*cls_feats)
        self.reg_feats = nn.Sequential(*reg_feats)

        self.init_weights()
    def init_weights(self):
        """Initialize the parameters."""
        for m in self.modules():
            if isinstance(m, torch.nn.Conv2d):
                # In order to be consistent with the source code,
                # reset the Conv2d initialization parameters
                m.reset_parameters()
    def forward(self, x):
        """
        x: (Tensor) [B, C, H, W]
        """
        cls_feats = self.cls_feats(x)
        reg_feats = self.reg_feats(x)

        return cls_feats, reg_feats
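
# Shape sketch (added note, not part of the original file): each Conv block
# above uses k=3, s=1, p=1 and so preserves spatial size. An input x of shape
# [B, in_dim, H, W] therefore yields
#   cls_feats: [B, cls_head_dim, H, W]
#   reg_feats: [B, reg_head_dim, H, W]
# The two branches are fully decoupled stacks of num_cls_head and num_reg_head
# Conv layers that share only the input feature map.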
# Multi-level Head
class MultiLevelHead(nn.Module):
    def __init__(self, cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
        super().__init__()
        ## ----------- Network Parameters -----------
        self.multi_level_heads = nn.ModuleList(
            [SingleLevelHead(in_dim       = in_dims[level],
                             cls_head_dim = max(in_dims[0], min(num_classes, 100)),
                             reg_head_dim = max(in_dims[0]//4, 16, 4*reg_max),
                             num_cls_head = cfg['num_cls_head'],
                             num_reg_head = cfg['num_reg_head'],
                             act_type     = cfg['head_act'],
                             norm_type    = cfg['head_norm'],
                             depthwise    = cfg['head_depthwise'])
             for level in range(num_levels)
             ])
        # --------- Basic Parameters ----------
        self.in_dims = in_dims
        self.cls_head_dim = self.multi_level_heads[0].cls_head_dim
        self.reg_head_dim = self.multi_level_heads[0].reg_head_dim
    def forward(self, feats):
        """
        feats: List[(Tensor)] [[B, C, H, W], ...]
        """
        cls_feats = []
        reg_feats = []
        for feat, head in zip(feats, self.multi_level_heads):
            # ---------------- Pred ----------------
            cls_feat, reg_feat = head(feat)
            cls_feats.append(cls_feat)
            reg_feats.append(reg_feat)

        return cls_feats, reg_feats
# build detection head
def build_det_head(cfg, in_dims, num_levels=3, num_classes=80, reg_max=16):
    if cfg['head'] == 'decoupled_head':
        head = MultiLevelHead(cfg, in_dims, num_levels, num_classes, reg_max)
    else:
        # Fail loudly instead of returning None for an unknown head type
        raise NotImplementedError("Head type not supported: {}".format(cfg['head']))

    return head
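

if __name__ == '__main__':
    # A minimal smoke test (an added sketch, not part of the original file).
    # The cfg keys match the ones read by build_det_head above; the channel
    # widths and input sizes are illustrative, not an official config. Run it
    # in the package context (e.g. `python -m <package>.yolov8_head`) so the
    # relative import of Conv resolves.
    cfg = {
        'head': 'decoupled_head',
        'num_cls_head': 2,
        'num_reg_head': 2,
        'head_act': 'silu',
        'head_norm': 'BN',
        'head_depthwise': False,
    }
    in_dims = [256, 512, 512]
    head = build_det_head(cfg, in_dims, num_levels=3, num_classes=80, reg_max=16)

    # Fake P3/P4/P5 features for a 640x640 input (strides 8, 16, 32).
    feats = [torch.randn(2, in_dims[level], 80 // 2**level, 80 // 2**level)
             for level in range(3)]
    cls_feats, reg_feats = head(feats)
    for level, (cls_feat, reg_feat) in enumerate(zip(cls_feats, reg_feats)):
        # With these settings: cls_head_dim = max(256, min(80, 100)) = 256 and
        # reg_head_dim = max(256 // 4, 16, 4 * 16) = 64, i.e. the reg width is
        # driven by the 4 * reg_max distribution bins of the box branch.
        print("level {}: cls {}, reg {}".format(
            level, tuple(cls_feat.shape), tuple(reg_feat.shape)))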