| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111 |
- import torch
- import torch.nn as nn
- from .yolo_free_v2_basic import Conv
class SingleLevelHead(nn.Module):
    """Decoupled single-level detection head.

    Runs two independent 3x3 conv stacks over the same input feature map:
    one producing classification features, one producing regression
    features. Depths and conv options come from ``cfg``.
    """

    def __init__(self, cfg, in_dim, out_dim, num_classes):
        super().__init__()
        # --------- Basic Parameters ----------
        self.in_dim = in_dim
        self.num_classes = num_classes
        self.num_cls_head = cfg['num_cls_head']
        self.num_reg_head = cfg['num_reg_head']
        self.act_type = cfg['head_act']
        self.norm_type = cfg['head_norm']
        self.depthwise = cfg['head_depthwise']

        # --------- Network Parameters ----------
        ## classification branch: first layer maps in_dim -> cls_head_dim,
        ## the remaining layers keep the width constant.
        self.cls_head_dim = max(out_dim, num_classes)
        self.cls_feats = nn.Sequential(*[
            Conv(in_dim if i == 0 else self.cls_head_dim,
                 self.cls_head_dim, k=3, p=1, s=1,
                 act_type=self.act_type,
                 norm_type=self.norm_type,
                 depthwise=self.depthwise)
            for i in range(self.num_cls_head)
        ])
        ## regression branch: width is wide enough for 4*reg_max DFL bins.
        self.reg_head_dim = max(out_dim, 4*cfg['reg_max'])
        self.reg_feats = nn.Sequential(*[
            Conv(in_dim if i == 0 else self.reg_head_dim,
                 self.reg_head_dim, k=3, p=1, s=1,
                 act_type=self.act_type,
                 norm_type=self.norm_type,
                 depthwise=self.depthwise)
            for i in range(self.num_reg_head)
        ])

    def forward(self, x):
        """
        x: (Tensor) [B, C, H, W]
        Returns (cls_feats, reg_feats), each [B, head_dim, H, W].
        """
        return self.cls_feats(x), self.reg_feats(x)
-
class MultiLevelHead(nn.Module):
    """Detection head over a feature pyramid: one SingleLevelHead per level."""

    def __init__(self, cfg, in_dims, out_dim, num_classes=80, num_levels=3):
        super().__init__()
        ## ----------- Network Parameters -----------
        heads = [SingleLevelHead(cfg, in_dims[level], out_dim, num_classes)
                 for level in range(num_levels)]
        self.multi_level_heads = nn.ModuleList(heads)
        # --------- Basic Parameters ----------
        self.in_dims = in_dims
        self.num_classes = num_classes
        # head widths are identical across levels; expose level 0's.
        self.cls_head_dim = heads[0].cls_head_dim
        self.reg_head_dim = heads[0].reg_head_dim

    def forward(self, feats):
        """
        feats: List[(Tensor)] [[B, C, H, W], ...], one map per level.
        Returns (cls_feats, reg_feats): per-level lists of feature maps.
        """
        cls_feats = []
        reg_feats = []
        for feat, head in zip(feats, self.multi_level_heads):
            # ---------------- Pred ----------------
            cls_out, reg_out = head(feat)
            cls_feats.append(cls_out)
            reg_feats.append(reg_out)
        return cls_feats, reg_feats
-
- # build detection head
def build_det_head(cfg, in_dim, out_dim, num_classes=80, num_levels=3):
    """Build the detection head selected by cfg['head'].

    Args:
        cfg: head config dict; cfg['head'] names the head type.
        in_dim: per-level input channel list passed through to the head.
        out_dim: base output channel width.
        num_classes: number of object classes (default 80).
        num_levels: number of pyramid levels (default 3).

    Returns:
        The constructed head module.

    Raises:
        ValueError: if cfg['head'] is not a recognized head type.
        (The original fell through and raised an opaque UnboundLocalError
        on `return head` for unknown types.)
    """
    if cfg['head'] == 'decoupled_head':
        return MultiLevelHead(cfg, in_dim, out_dim, num_classes, num_levels)
    raise ValueError('Unknown detection head: {}'.format(cfg['head']))
|