yjh0410 1 year ago
parent
commit
4a8fa0da6f
3 changed files with 6 additions and 18 deletions
  1. odlab/train.py (+1 -1)
  2. odlab/train.sh (+2 -14)
  3. odlab/utils/lr_scheduler.py (+3 -3)

+ 1 - 1
odlab/main.py → odlab/train.py

@@ -137,7 +137,7 @@ def main():

     # ---------------------------- Build LR Scheduler ----------------------------
     cfg.warmup_iters = cfg.warmup_iters * cfg.grad_accumulate
-    wp_lr_scheduler = build_wp_lr_scheduler(cfg, cfg.base_lr)
+    wp_lr_scheduler = build_wp_lr_scheduler(cfg)
     lr_scheduler    = build_lr_scheduler(cfg, optimizer, args.resume)

     # ---------------------------- Build DDP model ----------------------------
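
The call-site change drops a redundant argument: build_wp_lr_scheduler already receives cfg, which carries base_lr, so passing cfg.base_lr separately duplicated state. The warmup_iters scaling on the line above it suggests the warmup counter is advanced once per forward iteration, while with gradient accumulation the optimizer only steps every grad_accumulate iterations; multiplying keeps the number of warmed-up optimizer updates unchanged. A quick sketch of that arithmetic (the values 500 and 4 are hypothetical, not taken from the repo):

# Hypothetical values -- assumptions for illustration, not odlab defaults.
warmup_iters    = 500  # desired warmup length, in optimizer updates
grad_accumulate = 4    # forward/backward passes per optimizer update

# The scaling performed in train.py:
warmup_iters = warmup_iters * grad_accumulate

print(warmup_iters)                     # 2000 forward iterations of warmup
print(warmup_iters // grad_accumulate)  # still 500 optimizer updates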

+ 2 - 14
odlab/train.sh

@@ -12,23 +12,11 @@ elif [[ $MODEL == *"fcos"* ]]; then
    # Epoch setting
    BATCH_SIZE=16
    EVAL_EPOCH=2
-elif [[ $MODEL == *"retinanet"* ]]; then
-    # Epoch setting
-    BATCH_SIZE=16
-    EVAL_EPOCH=2
-elif [[ $MODEL == *"plain_detr"* ]]; then
-    # Epoch setting
-    BATCH_SIZE=16
-    EVAL_EPOCH=2
-elif [[ $MODEL == *"rtdetr"* ]]; then
-    # Epoch setting
-    BATCH_SIZE=16
-    EVAL_EPOCH=1
fi

# -------------------------- Train Pipeline --------------------------
if [ $WORLD_SIZE == 1 ]; then
-    python main.py \
+    python train.py \
        --cuda \
        --dataset ${DATASET}  \
        --root ${DATA_ROOT} \
@@ -37,7 +25,7 @@ if [ $WORLD_SIZE == 1 ]; then
        --eval_epoch ${EVAL_EPOCH}
elif [[ $WORLD_SIZE -gt 1 && $WORLD_SIZE -le 8 ]]; then
    python -m torch.distributed.run --nproc_per_node=$WORLD_SIZE --master_port ${MASTER_PORT}  \
-        main.py \
+        train.py \
        --cuda \
        --distributed \
        --dataset ${DATASET}  \
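
When WORLD_SIZE is greater than 1, the script launches train.py under torch.distributed.run, which spawns one process per GPU and exports RANK, LOCAL_RANK, and WORLD_SIZE to each worker. A minimal sketch of the initialization a --distributed flag typically guards (odlab's actual setup code is not part of this diff, so this is an assumption):

import os

import torch
import torch.distributed as dist

def init_distributed():
    # torch.distributed.run exports LOCAL_RANK for every spawned worker.
    local_rank = int(os.environ['LOCAL_RANK'])
    # NCCL is the standard backend for multi-GPU training on CUDA devices.
    dist.init_process_group(backend='nccl')
    # Bind this process to its own GPU before building the DDP model.
    torch.cuda.set_device(local_rank)
    return local_rank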

+ 3 - 3
odlab/utils/lr_scheduler.py

@@ -23,15 +23,15 @@ class LinearWarmUpScheduler(object):
        self.set_lr(optimizer, tmp_lr)

## Build WP LR Scheduler
-def build_wp_lr_scheduler(cfg, base_lr=0.01):
+def build_wp_lr_scheduler(cfg):
    print('==============================')
    print('WarmUpScheduler: {}'.format(cfg.warmup))
-    print('--base_lr: {}'.format(base_lr))
+    print('--base_lr: {}'.format(cfg.base_lr))
    print('--warmup_iters: {}'.format(cfg.warmup_iters))
    print('--warmup_factor: {}'.format(cfg.warmup_factor))

    if cfg.warmup == 'linear':
-        wp_lr_scheduler = LinearWarmUpScheduler(base_lr, cfg.warmup_iters, cfg.warmup_factor)
+        wp_lr_scheduler = LinearWarmUpScheduler(cfg.base_lr, cfg.warmup_iters, cfg.warmup_factor)

    return wp_lr_scheduler
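
For reference, here is a minimal sketch of what LinearWarmUpScheduler might look like; only the constructor arguments and the set_lr(optimizer, tmp_lr) call are visible in this diff, so the linear-interpolation formula and the __call__ interface below are assumptions, not code from this commit. Note also that build_wp_lr_scheduler assigns wp_lr_scheduler only inside the cfg.warmup == 'linear' branch, so any other cfg.warmup value would reach the return with the name unbound and raise UnboundLocalError.

class LinearWarmUpScheduler(object):
    """Linearly ramps the LR from base_lr * warmup_factor up to base_lr.

    A sketch consistent with the calls visible above; the exact update
    rule in odlab may differ.
    """
    def __init__(self, base_lr, warmup_iters, warmup_factor):
        self.base_lr = base_lr
        self.warmup_iters = warmup_iters
        self.warmup_factor = warmup_factor

    def set_lr(self, optimizer, lr):
        # Write the new learning rate into every parameter group.
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr

    def __call__(self, cur_iter, optimizer):
        # Scale factor grows linearly from warmup_factor at iter 0 to 1.0
        # once cur_iter reaches warmup_iters.
        alpha = min(cur_iter / self.warmup_iters, 1.0)
        factor = self.warmup_factor * (1.0 - alpha) + alpha
        self.set_lr(optimizer, self.base_lr * factor)

A typical call site would invoke wp_lr_scheduler(cur_iter, optimizer) on every iteration while cur_iter < cfg.warmup_iters, after which the main lr_scheduler takes over.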