
Commit b156d25 (1 parent: a7a55af)

support more optimizer

1 file changed: +11 -1 lines

basicsr/models/base_model.py

@@ -105,8 +105,18 @@ def get_optimizer(self, optim_type, params, lr, **kwargs):
             optimizer = torch.optim.Adam(params, lr, **kwargs)
         elif optim_type == 'AdamW':
             optimizer = torch.optim.AdamW(params, lr, **kwargs)
+        elif optim_type == 'Adamax':
+            optimizer = torch.optim.Adamax(params, lr, **kwargs)
+        elif optim_type == 'SGD':
+            optimizer = torch.optim.SGD(params, lr, **kwargs)
+        elif optim_type == 'ASGD':
+            optimizer = torch.optim.ASGD(params, lr, **kwargs)
+        elif optim_type == 'RMSprop':
+            optimizer = torch.optim.RMSprop(params, lr, **kwargs)
+        elif optim_type == 'Rprop':
+            optimizer = torch.optim.Rprop(params, lr, **kwargs)
         else:
-            raise NotImplementedError(f'optimizer {optim_type} is not supperted yet.')
+            raise NotImplementedError(f'optimizer {optim_type} is not supported yet.')
         return optimizer

     def setup_schedulers(self):
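
For context, the optim_type string typically comes from the training options (in BasicSR, the optimizer type named in the YAML config), and any extra keyword arguments are forwarded unchanged to the torch.optim constructor. Below is a minimal standalone sketch of the same string-to-class dispatch this commit extends; build_optimizer is a hypothetical helper for illustration, not BasicSR's API:

# Minimal sketch (not BasicSR code): the same string-to-class dispatch,
# collapsed into a lookup instead of a chain of elif branches.
import torch


def build_optimizer(optim_type, params, lr, **kwargs):
    # The names supported after this commit; each matches a class
    # in torch.optim, so getattr can resolve the constructor.
    supported = {'Adam', 'AdamW', 'Adamax', 'SGD', 'ASGD', 'RMSprop', 'Rprop'}
    if optim_type not in supported:
        raise NotImplementedError(f'optimizer {optim_type} is not supported yet.')
    return getattr(torch.optim, optim_type)(params, lr, **kwargs)


model = torch.nn.Linear(4, 2)
# Extra kwargs (e.g. momentum for SGD) pass straight through, as in the patch.
opt = build_optimizer('SGD', model.parameters(), lr=1e-3, momentum=0.9)
print(type(opt).__name__)  # SGD

A table-driven dispatch like this stays one line per new optimizer, whereas the patched method grows two lines of elif per type; both behave the same for the names above.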
