@@ -8,7 +8,6 @@ except:

 model_urls = {
-    "elannet_nano": "https://github.com/yjh0410/image_classification_pytorch/releases/download/weight/yolov7_elannet_nano.pth",
     "elannet_tiny": "https://github.com/yjh0410/image_classification_pytorch/releases/download/weight/yolov7_elannet_tiny.pth",
     "elannet_large": "https://github.com/yjh0410/image_classification_pytorch/releases/download/weight/yolov7_elannet_large.pth",
     "elannet_huge": "https://github.com/yjh0410/image_classification_pytorch/releases/download/weight/yolov7_elannet_huge.pth",
@@ -16,86 +15,40 @@ model_urls = {
 # --------------------- ELANNet -----------------------
-# ELANNet-Nano
-class ELANNet_Nano(nn.Module):
-    def __init__(self, act_type='lrelu', norm_type='BN', depthwise=True):
-        super(ELANNet_Nano, self).__init__()
-        self.feat_dims = [64, 128, 256]
-
-        # P1/2
-        self.layer_1 = Conv(3, 16, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        # P2/4
-        self.layer_2 = nn.Sequential(
-            Conv(16, 32, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=32, out_dim=32, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        )
-        # P3/8
-        self.layer_3 = nn.Sequential(
-            nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=32, out_dim=64, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        )
-        # P4/16
-        self.layer_4 = nn.Sequential(
-            nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=64, out_dim=128, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        )
-        # P5/32
-        self.layer_5 = nn.Sequential(
-            nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=128, out_dim=256, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        )
-
-
-    def forward(self, x):
-        c1 = self.layer_1(x)
-        c2 = self.layer_2(c1)
-        c3 = self.layer_3(c2)
-        c4 = self.layer_4(c3)
-        c5 = self.layer_5(c4)
-
-        outputs = [c3, c4, c5]
-
-        return outputs
-
-
-# ELANNet-Tiny
+## ELANNet-Tiny
 class ELANNet_Tiny(nn.Module):
     """
     ELAN-Net of YOLOv7-Tiny.
     """
     def __init__(self, act_type='silu', norm_type='BN', depthwise=False):
         super(ELANNet_Tiny, self).__init__()
-        self.feat_dims = [128, 256, 512]
+        # -------------- Basic parameters --------------
+        self.feat_dims = [32, 64, 128, 256, 512]
+        self.squeeze_ratios = [0.5, 0.5, 0.5, 0.5]  # Stage-1 -> Stage-4
+        self.branch_depths = [1, 1, 1, 1]           # Stage-1 -> Stage-4

-        # P1/2
-        self.layer_1 = Conv(3, 32, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
-        # P2/4
+        # -------------- Network parameters --------------
+        ## P1/2
+        self.layer_1 = Conv(3, self.feat_dims[0], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+        ## P2/4: Stage-1
         self.layer_2 = nn.Sequential(
-            Conv(32, 64, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=64, out_dim=64, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            Conv(self.feat_dims[0], self.feat_dims[1], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[1], self.feat_dims[1], self.squeeze_ratios[0], self.branch_depths[0], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P3/8
+        ## P3/8: Stage-2
         self.layer_3 = nn.Sequential(
             nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=64, out_dim=128, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            ELANBlock(self.feat_dims[1], self.feat_dims[2], self.squeeze_ratios[1], self.branch_depths[1], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P4/16
+        ## P4/16: Stage-3
         self.layer_4 = nn.Sequential(
             nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=128, out_dim=256, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            ELANBlock(self.feat_dims[2], self.feat_dims[3], self.squeeze_ratios[2], self.branch_depths[2], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P5/32
+        ## P5/32: Stage-4
         self.layer_5 = nn.Sequential(
             nn.MaxPool2d((2, 2), 2),
-            ELANBlock(in_dim=256, out_dim=512, expand_ratio=0.5, depth=1,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            ELANBlock(self.feat_dims[3], self.feat_dims[4], self.squeeze_ratios[3], self.branch_depths[3], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
@@ -110,42 +63,41 @@ class ELANNet_Tiny(nn.Module):
         return outputs

-
 ## ELANNet-Large
 class ELANNet_Lagre(nn.Module):
     def __init__(self, act_type='silu', norm_type='BN', depthwise=False):
         super(ELANNet_Lagre, self).__init__()
-        self.feat_dims = [512, 1024, 1024]
-
-        # P1/2
+        # -------------------- Basic parameters --------------------
+        self.feat_dims = [32, 64, 128, 256, 512, 1024, 1024]
+        self.squeeze_ratios = [0.5, 0.5, 0.5, 0.25]  # Stage-1 -> Stage-4
+        self.branch_depths = [2, 2, 2, 2]            # Stage-1 -> Stage-4
+
+        # -------------------- Network parameters --------------------
+        ## P1/2
         self.layer_1 = nn.Sequential(
-            Conv(3, 32, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            Conv(32, 64, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            Conv(64, 64, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            Conv(3, self.feat_dims[0], k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            Conv(self.feat_dims[0], self.feat_dims[1], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            Conv(self.feat_dims[1], self.feat_dims[1], k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P2/4
+        ## P2/4: Stage-1
         self.layer_2 = nn.Sequential(
-            Conv(64, 128, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=128, out_dim=256, expand_ratio=0.5, depth=2,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            Conv(self.feat_dims[1], self.feat_dims[2], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[2], self.feat_dims[3], self.squeeze_ratios[0], self.branch_depths[0], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P3/8
+        ## P3/8: Stage-2
         self.layer_3 = nn.Sequential(
-            DownSample(in_dim=256, out_dim=256, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=256, out_dim=512, expand_ratio=0.5, depth=2,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[3], self.feat_dims[3], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[3], self.feat_dims[4], self.squeeze_ratios[1], self.branch_depths[1], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P4/16
+        ## P4/16: Stage-3
         self.layer_4 = nn.Sequential(
-            DownSample(in_dim=512, out_dim=512, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=512, out_dim=1024, expand_ratio=0.5, depth=2,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[4], self.feat_dims[4], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[4], self.feat_dims[5], self.squeeze_ratios[2], self.branch_depths[2], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P5/32
+        ## P5/32: Stage-4
         self.layer_5 = nn.Sequential(
-            DownSample(in_dim=1024, out_dim=1024, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=1024, out_dim=1024, expand_ratio=0.25, depth=2,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[5], self.feat_dims[5], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[5], self.feat_dims[6], self.squeeze_ratios[3], self.branch_depths[3], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
@@ -160,42 +112,41 @@ class ELANNet_Lagre(nn.Module):
         return outputs

-
 ## ELANNet-Huge
 class ELANNet_Huge(nn.Module):
     def __init__(self, act_type='silu', norm_type='BN', depthwise=False):
         super(ELANNet_Huge, self).__init__()
-        self.feat_dims = [640, 1280, 1280]
-
-        # P1/2
+        # -------------------- Basic parameters --------------------
+        self.feat_dims = [40, 80, 160, 320, 640, 1280, 1280]
+        self.squeeze_ratios = [0.5, 0.5, 0.5, 0.25]  # Stage-1 -> Stage-4
+        self.branch_depths = [3, 3, 3, 3]            # Stage-1 -> Stage-4
+
+        # -------------------- Network parameters --------------------
+        ## P1/2
         self.layer_1 = nn.Sequential(
-            Conv(3, 40, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            Conv(40, 80, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            Conv(80, 80, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            Conv(3, self.feat_dims[0], k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            Conv(self.feat_dims[0], self.feat_dims[1], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            Conv(self.feat_dims[1], self.feat_dims[1], k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P2/4
+        ## P2/4: Stage-1
         self.layer_2 = nn.Sequential(
-            Conv(80, 160, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=160, out_dim=320, expand_ratio=0.5, depth=3,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            Conv(self.feat_dims[1], self.feat_dims[2], k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[2], self.feat_dims[3], self.squeeze_ratios[0], self.branch_depths[0], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P3/8
+        ## P3/8: Stage-2
         self.layer_3 = nn.Sequential(
-            DownSample(in_dim=320, out_dim=320, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=320, out_dim=640, expand_ratio=0.5, depth=3,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[3], self.feat_dims[3], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[3], self.feat_dims[4], self.squeeze_ratios[1], self.branch_depths[1], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P4/16
+        ## P4/16: Stage-3
         self.layer_4 = nn.Sequential(
-            DownSample(in_dim=640, out_dim=640, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=640, out_dim=1280, expand_ratio=0.5, depth=3,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[4], self.feat_dims[4], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[4], self.feat_dims[5], self.squeeze_ratios[2], self.branch_depths[2], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
-        # P5/32
+        ## P5/32: Stage-4
         self.layer_5 = nn.Sequential(
-            DownSample(in_dim=1280, out_dim=1280, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
-            ELANBlock(in_dim=1280, out_dim=1280, expand_ratio=0.25, depth=3,
-                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
+            DownSample(self.feat_dims[5], self.feat_dims[5], act_type=act_type, norm_type=norm_type, depthwise=depthwise),
+            ELANBlock(self.feat_dims[5], self.feat_dims[6], self.squeeze_ratios[3], self.branch_depths[3], act_type=act_type, norm_type=norm_type, depthwise=depthwise)
         )
@@ -212,11 +163,8 @@ class ELANNet_Huge(nn.Module):

 # --------------------- Functions -----------------------
+## build backbone
 def build_backbone(cfg, pretrained=False):
-    """Constructs a ELANNet model.
-    Args:
-        pretrained (bool): If True, returns a model pre-trained on ImageNet
-    """
     # build backbone
     if cfg['backbone'] == 'elannet_huge':
         backbone = ELANNet_Huge(cfg['bk_act'], cfg['bk_norm'], cfg['bk_dpw'])
@@ -224,10 +172,8 @@ def build_backbone(cfg, pretrained=False):
         backbone = ELANNet_Lagre(cfg['bk_act'], cfg['bk_norm'], cfg['bk_dpw'])
     elif cfg['backbone'] == 'elannet_tiny':
         backbone = ELANNet_Tiny(cfg['bk_act'], cfg['bk_norm'], cfg['bk_dpw'])
-    elif cfg['backbone'] == 'elannet_nano':
-        backbone = ELANNet_Nano(cfg['bk_act'], cfg['bk_norm'], cfg['bk_dpw'])
     # pyramid feat dims
-    feat_dims = backbone.feat_dims
+    feat_dims = backbone.feat_dims[-3:]

     # load imagenet pretrained weight
     if pretrained:
@@ -262,13 +208,11 @@ if __name__ == '__main__':
     import time
     from thop import profile
     cfg = {
-        'pretrained': False,
+        'pretrained': True,
         'backbone': 'elannet_huge',
         'bk_act': 'silu',
         'bk_norm': 'BN',
         'bk_dpw': False,
-        'p6_feat': False,
-        'p7_feat': False,
     }
     model, feats = build_backbone(cfg)
     x = torch.randn(1, 3, 224, 224)
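
A minimal usage sketch of the refactored backbone factory, not part of the patch. The cfg keys, the build_backbone signature, and the [c3, c4, c5] pyramid outputs are taken from the diff above; the import path `elannet` and the chosen backbone are assumptions for illustration.

# Usage sketch; assumes this file is importable as `elannet` (hypothetical path).
import torch
from elannet import build_backbone

cfg = {
    'pretrained': False,
    'backbone': 'elannet_tiny',
    'bk_act': 'silu',
    'bk_norm': 'BN',
    'bk_dpw': False,
}
# pretrained=True would pull the ImageNet weights listed in model_urls
model, feat_dims = build_backbone(cfg, pretrained=False)
print(feat_dims)        # last three stage dims, e.g. [128, 256, 512] for elannet_tiny

x = torch.randn(1, 3, 224, 224)
c3, c4, c5 = model(x)   # P3/8, P4/16, P5/32 feature maps
print(c3.shape, c4.shape, c5.shape)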