yjh0410 1 year ago
parent
commit
57a5f2c28c

+ 19 - 19
yolo/models/gelan/gelan_backbone.py

@@ -8,7 +8,7 @@ except:
 
 # IN1K pretrained weight
 pretrained_urls = {
-    's': None,
+    's': "https://github.com/yjh0410/ICLab/releases/download/in1k_pretrained/gelan_s.pth",
     'm': None,
     'l': None,
     'x': None,
@@ -159,26 +159,26 @@ if __name__ == '__main__':
             self.bk_act = 'silu'
             self.bk_norm = 'BN'
             self.bk_depthwise = False
-            # Gelan-C scale
-            self.backbone_feats = {
-                "c1": [64],
-                "c2": [128, [128, 64], 256],
-                "c3": [256, [256, 128], 512],
-                "c4": [512, [512, 256], 512],
-                "c5": [512, [512, 256], 512],
-            }
-            # # Gelan-S scale
-            # self.scale = "l"
-            # self.backbone_depth = 1
+            # # Gelan-C scale
             # self.backbone_feats = {
-            #     "c1": [32],
-            #     "c2": [64,  [64, 32],   64],
-            #     "c3": [64,  [64, 32],   128],
-            #     "c4": [128, [128, 64],  256],
-            #     "c5": [256, [256, 128], 256],
+            #     "c1": [64],
+            #     "c2": [128, [128, 64], 256],
+            #     "c3": [256, [256, 128], 512],
+            #     "c4": [512, [512, 256], 512],
+            #     "c5": [512, [512, 256], 512],
             # }
-            # self.scale = "s"
-            # self.backbone_depth = 3
+            # self.scale = "l"
+            # self.backbone_depth = 1
+            # Gelan-S scale
+            self.backbone_feats = {
+                "c1": [32],
+                "c2": [64,  [64, 32],   64],
+                "c3": [64,  [64, 32],   128],
+                "c4": [128, [128, 64],  256],
+                "c5": [256, [256, 128], 256],
+            }
+            self.scale = "s"
+            self.backbone_depth = 3
 
     cfg = BaseConfig()
     model = build_backbone(cfg)

+ 54 - 3
yolo/models/gelan/gelan_head.py

@@ -1,8 +1,11 @@
 import torch
 import torch.nn as nn
 
-from .gelan_basic import BasicConv
-
+try:
+    from .gelan_basic import BasicConv
+except:
+    from  gelan_basic import BasicConv
+    
 
 # Single-level Head
 class SingleLevelHead(nn.Module):
@@ -122,4 +125,52 @@ class GElanDetHead(nn.Module):
             cls_feats.append(cls_feat)
             reg_feats.append(reg_feat)
 
-        return cls_feats, reg_feats
+        return cls_feats, reg_feats
+    
+
+
+if __name__=='__main__':
+    import time
+    from thop import profile
+    # Model config
+    
+    # GElan-Base config
+    class GElanBaseConfig(object):
+        def __init__(self) -> None:
+            # ---------------- Model config ----------------
+            self.reg_max  = 16
+            self.out_stride = [8, 16, 32]
+            self.max_stride = 32
+            self.num_levels = 3
+            ## Head
+            self.head_act  = 'lrelu'
+            self.head_norm = 'BN'
+            self.head_depthwise = False
+            self.num_cls_head   = 2
+            self.num_reg_head   = 2
+
+    cfg = GElanBaseConfig()
+    cfg.num_classes = 20
+
+    # Build a head
+    fpn_dims = [128, 256, 256]
+    pyramid_feats = [torch.randn(1, fpn_dims[0], 80, 80),
+                     torch.randn(1, fpn_dims[1], 40, 40),
+                     torch.randn(1, fpn_dims[2], 20, 20)]
+    head = GElanDetHead(cfg, fpn_dims)
+
+
+    # Inference
+    t0 = time.time()
+    cls_feats, reg_feats = head(pyramid_feats)
+    t1 = time.time()
+    print('Time: ', t1 - t0)
+    print("====== GElan Head output ======")
+    for level, (cls_f, reg_f) in enumerate(zip(cls_feats, reg_feats)):
+        print("- Level-{} : ".format(level), cls_f.shape, reg_f.shape)
+
+    flops, params = profile(head, inputs=(pyramid_feats, ), verbose=False)
+    print('==============================')
+    print('GFLOPs : {:.2f}'.format(flops / 1e9 * 2))
+    print('Params : {:.2f} M'.format(params / 1e6))
+    

+ 57 - 2
yolo/models/gelan/gelan_pafpn.py

@@ -3,7 +3,10 @@ import torch.nn as nn
 import torch.nn.functional as F
 from typing import List
 
-from .gelan_basic import RepGElanLayer, ADown
+try:
+    from .gelan_basic import RepGElanLayer, ADown
+except:
+    from  gelan_basic import RepGElanLayer, ADown
 
 
 # PaFPN-ELAN
@@ -100,4 +103,56 @@ class GElanPaFPN(nn.Module):
         out_feats = [p3, p4, p5] # [P3, P4, P5]
 
         return out_feats
-    
+
+
+if __name__=='__main__':
+    import time
+    from thop import profile
+    # Model config
+    
+    # GElan-Base config
+    class GElanBaseConfig(object):
+        def __init__(self) -> None:
+            # ---------------- Model config ----------------
+            self.width    = 0.50
+            self.depth    = 0.34
+            self.ratio    = 2.0
+            self.out_stride = [8, 16, 32]
+            self.max_stride = 32
+            self.num_levels = 3
+            ## FPN
+            self.fpn      = 'gelan_pafpn'
+            self.fpn_act  = 'silu'
+            self.fpn_norm = 'BN'
+            self.fpn_depthwise = False
+            self.fpn_depth    = 3
+            self.fpn_feats_td = {
+                "p4": [[256, 128], 256],
+                "p3": [[128, 64],  128],
+            }
+            self.fpn_feats_bu = {
+                "p4": [[256, 128], 256],
+                "p5": [[256, 128], 256],
+            }
+
+    cfg = GElanBaseConfig()
+    # Build a head
+    in_dims  = [128, 256, 256]
+    fpn = GElanPaFPN(cfg, in_dims)
+
+    # Inference
+    x = [torch.randn(1, in_dims[0], 80, 80),
+         torch.randn(1, in_dims[1], 40, 40),
+         torch.randn(1, in_dims[2], 20, 20)]
+    t0 = time.time()
+    output = fpn(x)
+    t1 = time.time()
+    print('Time: ', t1 - t0)
+    print('====== FPN output ====== ')
+    for level, feat in enumerate(output):
+        print("- Level-{} : ".format(level), feat.shape)
+
+    flops, params = profile(fpn, inputs=(x, ), verbose=False)
+    print('==============================')
+    print('GFLOPs : {:.2f}'.format(flops / 1e9 * 2))
+    print('Params : {:.2f} M'.format(params / 1e6))