@@ -37,13 +37,19 @@ def parse_args():
     group_gpus.add_argument(
         '--gpus',
         type=int,
-        help='number of gpus to use '
+        help='(Deprecated, please use --gpu-id) number of gpus to use '
         '(only applicable to non-distributed training)')
     group_gpus.add_argument(
         '--gpu-ids',
         type=int,
         nargs='+',
-        help='ids of gpus to use '
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
+        '(only applicable to non-distributed training)')
+    group_gpus.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
         '(only applicable to non-distributed training)')
     parser.add_argument('--seed', type=int, default=None, help='random seed')
     parser.add_argument(
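Taken on its own, the new argument group can be exercised with a throwaway parser. A minimal sketch, assuming `group_gpus` is a mutually exclusive group created earlier in `parse_args()` (its construction sits outside this hunk); the standalone `parser` below is illustrative only:

import argparse

# Illustrative stand-in for the real parser in the training script; the
# mutually-exclusive group is an assumption based on the variable name.
parser = argparse.ArgumentParser(description='Train a model')
group_gpus = parser.add_mutually_exclusive_group()
group_gpus.add_argument(
    '--gpus',
    type=int,
    help='(Deprecated, please use --gpu-id) number of gpus to use '
    '(only applicable to non-distributed training)')
group_gpus.add_argument(
    '--gpu-ids',
    type=int,
    nargs='+',
    help='(Deprecated, please use --gpu-id) ids of gpus to use '
    '(only applicable to non-distributed training)')
group_gpus.add_argument(
    '--gpu-id',
    type=int,
    default=0,
    help='id of gpu to use '
    '(only applicable to non-distributed training)')

args = parser.parse_args(['--gpu-id', '2'])
print(args.gpu_id, args.gpus, args.gpu_ids)  # 2 None None

Note that argparse only rejects combinations where two of the grouped flags are passed explicitly; the `default=0` on `--gpu-id` never conflicts with the deprecated flags.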
@@ -113,20 +119,23 @@ def main():
     if args.resume_from is not None:
         cfg.resume_from = args.resume_from
     cfg.auto_resume = args.auto_resume
+    if args.gpus is not None:
+        cfg.gpu_ids = range(1)
+        warnings.warn('`--gpus` is deprecated because we only support '
+                      'single GPU mode in non-distributed training. '
+                      'Use `gpus=1` now.')
     if args.gpu_ids is not None:
-        cfg.gpu_ids = args.gpu_ids
-    else:
-        cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
+        cfg.gpu_ids = args.gpu_ids[0:1]
+        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. '
+                      'Because we only support single GPU mode in '
+                      'non-distributed training. Use the first GPU '
+                      'in `gpu_ids` now.')
+    if args.gpus is None and args.gpu_ids is None:
+        cfg.gpu_ids = [args.gpu_id]

     # init distributed env first, since logger depends on the dist info.
     if args.launcher == 'none':
         distributed = False
-        if len(cfg.gpu_ids) > 1:
-            warnings.warn(
-                f'We treat { cfg.gpu_ids } as gpu-ids, and reset to '
-                f'{ cfg.gpu_ids[0:1] } as gpu-ids to avoid potential error in '
-                'non-distribute training time.')
-            cfg.gpu_ids = cfg.gpu_ids[0:1]
     else:
         distributed = True
         init_dist(args.launcher, **cfg.dist_params)
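Net effect of the three new branches in `main()`: `--gpu-ids` takes precedence over `--gpus` (its assignment to `cfg.gpu_ids` runs last), and every path now yields exactly one device id, which is why the old `len(cfg.gpu_ids) > 1` guard under `launcher == 'none'` can be dropped. A hypothetical helper, not part of the patch, condensing that precedence (warnings shortened):

import warnings

def resolve_gpu_ids(gpus, gpu_ids, gpu_id):
    # Checking gpu_ids first reproduces the patch's net behaviour: its
    # assignment to cfg.gpu_ids overwrites the one made for --gpus.
    if gpu_ids is not None:
        warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`.')
        return gpu_ids[0:1]
    if gpus is not None:
        warnings.warn('`--gpus` is deprecated; single GPU mode only.')
        return range(1)
    return [gpu_id]

print(list(resolve_gpu_ids(4, None, 0)))  # [0]  --gpus collapses to gpu 0
print(resolve_gpu_ids(None, [2, 3], 0))   # [2]  first id of --gpu-ids
print(resolve_gpu_ids(None, None, 1))     # [1]  --gpu-id passthrough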