yolov7_backbone.py

import torch
import torch.nn as nn

try:
    from .yolov7_basic import Conv, ELANBlock, DownSample
except ImportError:
    from yolov7_basic import Conv, ELANBlock, DownSample


model_urls = {
    "elannet": "https://github.com/yjh0410/image_classification_pytorch/releases/download/weight/yolov7_elannet.pth",
}


# --------------------- ELANNet -----------------------
class ELANNet(nn.Module):
    """
    ELAN-Net, the backbone of YOLOv7-L.
    """
    def __init__(self, act_type='silu', norm_type='BN', depthwise=False):
        super(ELANNet, self).__init__()
        self.feat_dims = [512, 1024, 1024]

        # P1/2
        self.layer_1 = nn.Sequential(
            Conv(3, 32, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
            Conv(32, 64, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
            Conv(64, 64, k=3, p=1, act_type=act_type, norm_type=norm_type, depthwise=depthwise)
        )
        # P2/4
        self.layer_2 = nn.Sequential(
            Conv(64, 128, k=3, p=1, s=2, act_type=act_type, norm_type=norm_type, depthwise=depthwise),
            ELANBlock(in_dim=128, out_dim=256, expand_ratio=0.5,
                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
        )
        # P3/8
        self.layer_3 = nn.Sequential(
            DownSample(in_dim=256, act_type=act_type),
            ELANBlock(in_dim=256, out_dim=512, expand_ratio=0.5,
                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
        )
        # P4/16
        self.layer_4 = nn.Sequential(
            DownSample(in_dim=512, act_type=act_type),
            ELANBlock(in_dim=512, out_dim=1024, expand_ratio=0.5,
                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
        )
        # P5/32
        self.layer_5 = nn.Sequential(
            DownSample(in_dim=1024, act_type=act_type),
            ELANBlock(in_dim=1024, out_dim=1024, expand_ratio=0.25,
                      act_type=act_type, norm_type=norm_type, depthwise=depthwise)
        )

    def forward(self, x):
        c1 = self.layer_1(x)
        c2 = self.layer_2(c1)
        c3 = self.layer_3(c2)   # P3/8  feature,  512 channels
        c4 = self.layer_4(c3)   # P4/16 feature, 1024 channels
        c5 = self.layer_5(c4)   # P5/32 feature, 1024 channels

        outputs = [c3, c4, c5]

        return outputs


# --------------------- Functions -----------------------
def build_backbone(cfg, pretrained=False):
    """Constructs an ELANNet backbone.

    Args:
        cfg (dict): config with 'bk_act', 'bk_norm' and 'bk_dpw' entries.
        pretrained (bool): If True, load weights pre-trained on ImageNet.
    """
    backbone = ELANNet(cfg['bk_act'], cfg['bk_norm'], cfg['bk_dpw'])
    feat_dims = backbone.feat_dims

    if pretrained:
        url = model_urls['elannet']
        if url is not None:
            print('Loading pretrained weight ...')
            checkpoint = torch.hub.load_state_dict_from_url(
                url=url, map_location="cpu", check_hash=True)
            # checkpoint state dict
            checkpoint_state_dict = checkpoint.pop("model")
            # model state dict
            model_state_dict = backbone.state_dict()
            # keep only the checkpoint weights whose name and shape match the model
            for k in list(checkpoint_state_dict.keys()):
                if k in model_state_dict:
                    shape_model = tuple(model_state_dict[k].shape)
                    shape_checkpoint = tuple(checkpoint_state_dict[k].shape)
                    if shape_model != shape_checkpoint:
                        checkpoint_state_dict.pop(k)
                else:
                    checkpoint_state_dict.pop(k)
                    print('Unused key: ', k)

            backbone.load_state_dict(checkpoint_state_dict)
        else:
            print('No backbone pretrained: ELANNet')

    return backbone, feat_dims
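# --------------------- Usage sketch (illustrative) -----------------------
# A minimal sketch, not part of the original file: it shows how a downstream
# neck might consume the three backbone outputs together with 'feat_dims'.
# The 1x1 lateral convs and the function name below are placeholders, not a
# real YOLOv7 neck; only the channel/stride bookkeeping comes from the code above.
def _lateral_projection_example(out_dim=256):
    cfg = {'bk_act': 'silu', 'bk_norm': 'BN', 'bk_dpw': False}
    backbone, feat_dims = build_backbone(cfg, pretrained=False)
    # one 1x1 conv per pyramid level: C3/C4/C5 -> a common channel width
    laterals = nn.ModuleList([nn.Conv2d(dim, out_dim, kernel_size=1) for dim in feat_dims])
    x = torch.randn(1, 3, 224, 224)
    feats = backbone(x)   # [c3, c4, c5] at strides 8 / 16 / 32
    return [proj(f) for proj, f in zip(laterals, feats)]

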
if __name__ == '__main__':
    import time
    from thop import profile

    cfg = {
        'pretrained': False,
        'bk_act': 'silu',
        'bk_norm': 'BN',
        'bk_dpw': False,
        'p6_feat': False,
        'p7_feat': False,
    }
    model, feats = build_backbone(cfg, pretrained=cfg['pretrained'])

    x = torch.randn(1, 3, 224, 224)
    t0 = time.time()
    outputs = model(x)
    t1 = time.time()
    print('Time: ', t1 - t0)
    for out in outputs:
        print(out.shape)

    x = torch.randn(1, 3, 224, 224)
    print('==============================')
    flops, params = profile(model, inputs=(x, ), verbose=False)
    print('==============================')
    print('GFLOPs : {:.2f}'.format(flops / 1e9 * 2))
    print('Params : {:.2f} M'.format(params / 1e6))