浏览代码

linear LR Scheduler for FreeYOLOv1-L

yjh0410 2 年之前
父节点
当前提交
d3d81af84e
共有 4 个文件被更改,包括 8 次插入、9 次删除
  1. config/model_config/yolo_free_v1_config.py (+2 −2)
  2. train.sh (+2 −2)
  3. train_ddp.sh (+1 −1)
  4. utils/solver/lr_scheduler.py (+3 −4)

+ 2 - 2
config/model_config/yolo_free_v1_config.py

@@ -62,7 +62,7 @@ yolo_free_v1_cfg = {
         'ema_decay': 0.9998,       # SGD: 0.9999;   AdamW: 0.9998
         'ema_tau': 2000,
         ## LR schedule
-        'scheduler': 'cos_linear',
+        'scheduler': 'linear',
         'lr0': 0.001,              # SGD: 0.01;     AdamW: 0.001
         'lrf': 0.01,               # SGD: 0.01;     AdamW: 0.01
         'warmup_momentum': 0.8,
@@ -129,7 +129,7 @@ yolo_free_v1_cfg = {
         'ema_decay': 0.9998,       # SGD: 0.9999;   AdamW: 0.9998
         'ema_tau': 2000,
         ## LR schedule
-        'scheduler': 'cos_linear',
+        'scheduler': 'linear',
         'lr0': 0.001,              # SGD: 0.01;     AdamW: 0.001
         'lrf': 0.01,               # SGD: 0.01;     AdamW: 0.01
         'warmup_momentum': 0.8,

+ 2 - 2
train.sh

@@ -3,7 +3,7 @@ python train.py \
         --cuda \
         -d coco \
         --root /mnt/share/ssd2/dataset/ \
-        -m yolox_m \
+        -m yolo_free_v1_l \
         -bs 16 \
         -size 640 \
         --wp_epoch 3 \
@@ -12,7 +12,7 @@ python train.py \
         --ema \
         --fp16 \
         --multi_scale \
-        --resume weights/coco/yolox_m/yolox_m_best.pth \
+        # --resume weights/coco/yolox_m/yolox_m_best.pth \
         # --pretrained weights/coco/yolo_free_medium/yolo_free_medium_39.46.pth \
         # --eval_first
 

+ 1 - 1
train_ddp.sh

@@ -5,7 +5,7 @@ python -m torch.distributed.run --nproc_per_node=8 train.py \
                                                     -dist \
                                                     -d coco \
                                                     --root /data/datasets/ \
-                                                    -m yolovx_l \
+                                                    -m yolo_free_v1_l \
                                                     -bs 128 \
                                                     -size 640 \
                                                     --wp_epoch 3 \

+ 3 - 4
utils/solver/lr_scheduler.py

@@ -6,14 +6,13 @@ def build_lr_scheduler(cfg, optimizer, epochs):
     """Build learning rate scheduler from cfg file."""
     print('==============================')
     print('Lr Scheduler: {}'.format(cfg['scheduler']))
-
+    # Cosine LR scheduler
     if cfg['scheduler'] == 'cosine':
         lf = lambda x: ((1 - math.cos(x * math.pi / epochs)) / 2) * (cfg['lrf'] - 1) + 1
-        
+    # Linear LR scheduler
     elif cfg['scheduler'] == 'linear':
         lf = lambda x: (1 - x / epochs) * (1.0 - cfg['lrf']) + cfg['lrf']
-    elif cfg['scheduler'] == 'cos_linear':
-            lf = lambda x: (1 - x / epochs) * (1.0 - cfg['lrf']) + cfg['lrf'] if x > epochs // 2 else ((1 - math.cos(x * math.pi / epochs)) / 2) * (cfg['lrf'] - 1) + 1
+
     else:
         print('unknown lr scheduler.')
         exit(0)