yjh0410 há 2 anos
pai
commit
d72a810b33

+ 4 - 4
models/detectors/lodet/lodet_backbone.py

@@ -27,22 +27,22 @@ class ScaleModulationNet(nn.Module):
         # P2/4
         self.layer_2 = nn.Sequential(   
             DSBlock(16, act_type, norm_type, depthwise),             
-            SMBlock(32, None, act_type, norm_type, depthwise)
+            SMBlock(32, 32, act_type, norm_type, depthwise)
         )
         # P3/8
         self.layer_3 = nn.Sequential(
             DSBlock(32, act_type, norm_type, depthwise),             
-            SMBlock(64, None, act_type, norm_type, depthwise)
+            SMBlock(64, 64, act_type, norm_type, depthwise)
         )
         # P4/16
         self.layer_4 = nn.Sequential(
             DSBlock(64, act_type, norm_type, depthwise),             
-            SMBlock(128, None, act_type, norm_type, depthwise)
+            SMBlock(128, 128, act_type, norm_type, depthwise)
         )
         # P5/32
         self.layer_5 = nn.Sequential(
             DSBlock(128, act_type, norm_type, depthwise),             
-            SMBlock(256, None, act_type, norm_type, depthwise)
+            SMBlock(256, 256, act_type, norm_type, depthwise)
         )
 
 

+ 2 - 2
models/detectors/lodet/lodet_basic.py

@@ -85,7 +85,7 @@ class Conv(nn.Module):
 # ---------------------------- Core Modules ----------------------------
 ## Scale Modulation Block
 class SMBlock(nn.Module):
-    def __init__(self, in_dim, out_dim=None, act_type='silu', norm_type='BN', depthwise=False):
+    def __init__(self, in_dim, out_dim, act_type='silu', norm_type='BN', depthwise=False):
         super(SMBlock, self).__init__()
         # -------------- Basic parameters --------------
         self.in_dim = in_dim
@@ -111,7 +111,7 @@ class SMBlock(nn.Module):
 
         # Output proj
         self.out_proj = None
-        if out_dim is not None:
+        if in_dim != out_dim:
             self.out_proj = Conv(self.inter_dim*2, out_dim, k=1, act_type=act_type, norm_type=norm_type)