# conv.py
  1. import torch.nn as nn
  2. def get_conv2d(c1, c2, k, p, s, d, g):
  3. conv = nn.Conv2d(c1, c2, k, stride=s, padding=p, dilation=d, groups=g)
  4. return conv
  5. def get_activation(act_type=None):
  6. if act_type is None:
  7. return nn.Identity()
  8. elif act_type == 'relu':
  9. return nn.ReLU(inplace=True)
  10. elif act_type == 'lrelu':
  11. return nn.LeakyReLU(0.1, inplace=True)
  12. elif act_type == 'mish':
  13. return nn.Mish(inplace=True)
  14. elif act_type == 'silu':
  15. return nn.SiLU(inplace=True)
  16. elif act_type == 'gelu':
  17. return nn.GELU()
  18. else:
  19. raise NotImplementedError(act_type)
  20. def get_norm(norm_type, dim):
  21. if norm_type == 'BN':
  22. return nn.BatchNorm2d(dim)
  23. elif norm_type == 'GN':
  24. return nn.GroupNorm(num_groups=32, num_channels=dim)
  25. elif norm_type is None:
  26. return nn.Identity()
  27. else:
  28. raise NotImplementedError(norm_type)
  29. # ----------------- CNN ops -----------------
  30. class ConvModule(nn.Module):
  31. def __init__(self,
  32. c1,
  33. c2,
  34. k=1,
  35. p=0,
  36. s=1,
  37. d=1,
  38. act_type='relu',
  39. norm_type='BN',
  40. depthwise=False):
  41. super(ConvModule, self).__init__()
  42. convs = []
  43. if depthwise:
  44. convs.append(get_conv2d(c1, c1, k=k, p=p, s=s, d=d, g=c1))
  45. # depthwise conv
  46. if norm_type:
  47. convs.append(get_norm(norm_type, c1))
  48. if act_type:
  49. convs.append(get_activation(act_type))
  50. # pointwise conv
  51. convs.append(get_conv2d(c1, c2, k=1, p=0, s=1, d=d, g=1))
  52. if norm_type:
  53. convs.append(get_norm(norm_type, c2))
  54. if act_type:
  55. convs.append(get_activation(act_type))
  56. else:
  57. convs.append(get_conv2d(c1, c2, k=k, p=p, s=s, d=d, g=1))
  58. if norm_type:
  59. convs.append(get_norm(norm_type, c2))
  60. if act_type:
  61. convs.append(get_activation(act_type))
  62. self.convs = nn.Sequential(*convs)
  63. def forward(self, x):
  64. return self.convs(x)