train_net.py
"""
Main script to train DAOD models.
"""
import torch

from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.engine import default_argument_parser, default_setup, launch

from daod.config import add_config
from daod.data.datasets import register_all_datasets
from daod.engine.trainers import (
    BaseTrainer,
    BaseWQTrainer,
    BaseMosaicTrainer,
    DATrainer,
    AdaptiveTeacherTrainer,
    SourceFreeAdaptiveTeacherTrainer,
    BaseMixupTrainer,
    BaseMosaicWQTrainer,
    BaseMosaicWQNewTrainer,
    SourceFreeAdaptiveTeacherSingleTrainer,
)
from daod.engine.trainers import base

# Importing these modules registers the custom architectures with detectron2's registries.
from daod.modeling.meta_arch import *  # noqa: F401,F403
from daod.modeling.proposal_generator import *  # noqa: F401,F403
from daod.modeling.roi_heads import *  # noqa: F401,F403

# Share tensors through the file system so dataloader workers do not exhaust
# the open file descriptor limit.
torch.multiprocessing.set_sharing_strategy('file_system')


def setup(args):
    """Set up and freeze the config from the config file and command-line overrides."""
    cfg = get_cfg()
    add_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    default_setup(cfg, args)
    return cfg


def main(args):
    """Build the selected trainer and run training or evaluation."""
    cfg = setup(args)

    # Select the trainer class from the config.
    if cfg.TRAINER == "base":
        Trainer = BaseTrainer
    elif cfg.TRAINER == "base_wq":
        Trainer = BaseWQTrainer
    elif cfg.TRAINER == "base_mosaic":
        Trainer = BaseMosaicTrainer
    elif cfg.TRAINER == "base_mosaic_wq":
        Trainer = BaseMosaicWQTrainer
    elif cfg.TRAINER == "base_mosaic_wq_new":
        Trainer = BaseMosaicWQNewTrainer
    elif cfg.TRAINER == "base_mixup":
        Trainer = BaseMixupTrainer
    elif cfg.TRAINER == "source_free_adaptive_teacher_single":
        Trainer = SourceFreeAdaptiveTeacherSingleTrainer
    elif cfg.TRAINER == "da":
        Trainer = DATrainer
    elif cfg.TRAINER == "adaptive_teacher":
        Trainer = AdaptiveTeacherTrainer
    elif cfg.TRAINER == "source_free_adaptive_teacher":
        Trainer = SourceFreeAdaptiveTeacherTrainer
    else:
        raise ValueError(f"Trainer {cfg.TRAINER} not found.")

    register_all_datasets(cfg)

    if args.eval_only:
        model = Trainer.build_model(cfg)
        DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
            cfg.MODEL.WEIGHTS, resume=args.resume
        )
        # Alternative evaluation entry points, kept for reference:
        # return Trainer.test(cfg, model)
        # return Trainer.test_refinement(cfg, model)
        return base.test_refinement(cfg, model)

    trainer = Trainer(cfg)
    trainer.resume_or_load(resume=args.resume)
    return trainer.train()


if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    print("Command line args:", args)
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )
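
# Example invocations (illustrative only; the config path, output directory, and
# weights file below are placeholders, not files shipped with this script).
# detectron2's default_argument_parser provides --num-gpus, --config-file,
# --eval-only, and --resume, and trailing KEY VALUE pairs are passed to
# cfg.merge_from_list as overrides.
#
# Training:
#   python train_net.py --num-gpus 1 --config-file configs/example.yaml \
#       OUTPUT_DIR output/example_run
#
# Evaluation only, on saved weights:
#   python train_net.py --eval-only --config-file configs/example.yaml \
#       MODEL.WEIGHTS output/example_run/model_final.pth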