@@ -32,7 +32,7 @@ class ParamScheduler(metaclass=ABCMeta):
     More precisely, whatever the state of the optimizer (newly created or used by another scheduler) the scheduler
     sets defined absolute values.
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(
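Since this hunk touches the base class docstring, a minimal sketch of what "sets defined absolute values" means in practice may help: ParamScheduler is abstract, a subclass only supplies get_param(), and the returned value is written into the optimizer's param group verbatim on each event. The subclass name and decay rule below are hypothetical, and the import path assumes ignite>=0.4.5.

import torch.nn as nn
from torch.optim import SGD
from ignite.handlers import ParamScheduler  # ignite>=0.4.5 import path (assumed)

class StepDecayScheduler(ParamScheduler):  # hypothetical example subclass
    def get_param(self) -> float:
        # event_index counts events seen so far; halve the value every 10 events
        return 0.1 * (0.5 ** (self.event_index // 10))

# Whatever lr the optimizer starts with, the scheduler overwrites it with
# the absolute values computed by get_param().
optimizer = SGD(nn.Linear(4, 1).parameters(), lr=999.0)
scheduler = StepDecayScheduler(optimizer, "lr")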
@@ -251,7 +251,7 @@ class CyclicalScheduler(ParamScheduler):
         If the scheduler is bound to an 'ITERATION_*' event, 'cycle_size' should
         usually be the number of batches in an epoch.
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(
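A hedged sketch of the note in this hunk, with all setup names assumed: bound to an 'ITERATION_*' event, cycle_size is counted in iterations, so the batches-per-epoch count gives exactly one cycle per epoch; bound to an 'EPOCH_*' event it would be counted in epochs instead.

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import LinearCyclicalScheduler  # a concrete CyclicalScheduler

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=1e-3)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# Bound to an ITERATION_* event, cycle_size is counted in iterations:
# with 100 batches per epoch, cycle_size=100 spans exactly one epoch.
scheduler = LinearCyclicalScheduler(optimizer, "lr", 1e-3, 1e-1, cycle_size=100)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
# Bound to an EPOCH_* event instead, the same cycle_size would mean 100 epochs.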
@@ -340,7 +340,7 @@ class LinearCyclicalScheduler(CyclicalScheduler):
         # over the course of 1 epoch
         #
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def get_param(self) -> float:
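The two `#` comment lines in this hunk look like the tail of the docstring's usage example; a plausible, runnable reconstruction (all setup names are assumptions, and epoch_length stands in for len(default_train_loader)):

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import LinearCyclicalScheduler

default_optimizer = SGD(nn.Linear(4, 1).parameters(), lr=1e-3)
default_trainer = Engine(lambda engine, batch: None)  # placeholder update function
epoch_length = 100  # stand-in for len(default_train_loader)

scheduler = LinearCyclicalScheduler(default_optimizer, "lr", 1e-3, 1e-1, epoch_length)
default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
# Linearly increases the learning rate from 1e-3 to 1e-1 and back to 1e-3
# over the course of 1 epoch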
@@ -408,7 +408,7 @@ class CosineAnnealingScheduler(CyclicalScheduler):
     .. [Smith17] Smith, Leslie N. "Cyclical learning rates for training neural networks."
        Applications of Computer Vision (WACV), 2017 IEEE Winter Conference on. IEEE, 2017
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def get_param(self) -> float:
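This hunk only shows the [Smith17] reference, so for context here is a hedged usage sketch of the class itself (setup names are placeholders): each cycle descends from start_value to end_value along half a cosine curve.

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import CosineAnnealingScheduler

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=0.1)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# Anneals lr from 0.1 down to 1e-3 over each cycle of 100 iterations,
# following half a cosine wave, as in the cyclical schedules of [Smith17].
scheduler = CosineAnnealingScheduler(optimizer, "lr", start_value=0.1, end_value=1e-3, cycle_size=100)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)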
@@ -449,7 +449,7 @@ class ConcatScheduler(ParamScheduler):
         # The annealing cycles are repeated indefinitely.
         #
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(self, schedulers: List[ParamScheduler], durations: List[int], save_history: bool = False):
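The comment line in this hunk is the tail of the docstring example; a self-contained sketch of the pattern it describes (all setup names assumed): durations has one entry fewer than schedulers, and the final scheduler runs forever, which is why the annealing cycles repeat indefinitely.

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import ConcatScheduler, CosineAnnealingScheduler, LinearCyclicalScheduler

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=0.0)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

warmup = LinearCyclicalScheduler(optimizer, "lr", 0.0, 0.1, cycle_size=60)
anneal = CosineAnnealingScheduler(optimizer, "lr", 0.1, 0.0, cycle_size=60)

# Run `warmup` for its first 30 events, then hand over to `anneal`; the last
# scheduler in the list is never switched away from, so its cycles repeat.
scheduler = ConcatScheduler(schedulers=[warmup, anneal], durations=[30])
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)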
@@ -675,7 +675,7 @@ class LRScheduler(ParamScheduler):
         # the first lr value from the optimizer, otherwise it will be skipped:
         trainer.add_event_handler(Events.ITERATION_COMPLETED, scheduler)
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(self, lr_scheduler: _LRScheduler, save_history: bool = False):
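The context lines here already show the key usage detail; spelled out as a runnable sketch (setup names assumed):

import torch.nn as nn
from torch.optim import SGD
from torch.optim.lr_scheduler import StepLR
from ignite.engine import Engine, Events
from ignite.handlers import LRScheduler

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=0.1)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# Wrap a native PyTorch scheduler so Engine events can drive it.
scheduler = LRScheduler(StepLR(optimizer, step_size=8, gamma=0.1))

# Attach on ITERATION_COMPLETED so the optimizer's initial lr (0.1) is applied
# to the first iteration; attaching on ITERATION_STARTED would skip it.
trainer.add_event_handler(Events.ITERATION_COMPLETED, scheduler)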
@@ -806,7 +806,7 @@ def create_lr_scheduler_with_warmup(
         # Attach to the trainer
         trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
     if not isinstance(lr_scheduler, (ParamScheduler, _LRScheduler)):
         raise TypeError(
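A hedged end-to-end sketch of the function this hunk touches (setup names assumed; the isinstance check visible above confirms the wrapped scheduler may be either a torch _LRScheduler or an ignite ParamScheduler):

import torch.nn as nn
from torch.optim import SGD
from torch.optim.lr_scheduler import ExponentialLR
from ignite.engine import Engine, Events
from ignite.handlers import create_lr_scheduler_with_warmup

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=0.1)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# Linear warmup from 0.0 over the first 100 events, then the wrapped torch
# scheduler takes over; with warmup_end_value left unset, the warmup ends at
# the optimizer's initial lr.
scheduler = create_lr_scheduler_with_warmup(
    ExponentialLR(optimizer, gamma=0.98),
    warmup_start_value=0.0,
    warmup_duration=100,
)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)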
@@ -905,7 +905,7 @@ class PiecewiseLinear(ParamScheduler):
         # from 0.3 to 0.1 between 21st and 30th iterations and remains 0.1 until the end of the iterations.
         #
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(
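The behaviour described in this hunk's comment maps directly onto milestones_values; a plausible sketch that reproduces the quoted segment, 0.3 to 0.1 between the 21st and 30th iterations and constant afterwards (the milestones before iteration 21 are assumptions):

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import PiecewiseLinear

optimizer = SGD(nn.Linear(4, 1).parameters(), lr=0.5)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# Hold 0.5 until event 10, interpolate 0.5 -> 0.45 up to event 20, then
# 0.45 -> 0.3 at event 21, then 0.3 -> 0.1 between events 21 and 30;
# after the last milestone the value stays at 0.1.
scheduler = PiecewiseLinear(
    optimizer, "lr", milestones_values=[(10, 0.5), (20, 0.45), (21, 0.3), (30, 0.1)]
)
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)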
@@ -995,7 +995,7 @@ class ParamGroupScheduler:
         # Attach single scheduler to the trainer
         trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
 
-    .. versionadded:: 0.5.1
+    .. versionadded:: 0.4.5
     """
 
     def __init__(self, schedulers: List[ParamScheduler], names: Optional[List[str]] = None, save_history: bool = False):
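"Attach single scheduler to the trainer" refers to this wrapper driving several per-param-group schedulers through one handler; a hedged sketch (the model, group split and lr values are assumptions):

import torch.nn as nn
from torch.optim import SGD
from ignite.engine import Engine, Events
from ignite.handlers import LinearCyclicalScheduler, ParamGroupScheduler

model = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 1))
optimizer = SGD(
    [
        {"params": model[0].parameters(), "lr": 1e-5},  # each group supplies its own lr
        {"params": model[1].parameters(), "lr": 1e-3},
    ]
)
trainer = Engine(lambda engine, batch: None)  # placeholder update function

# One schedule per param group, selected via param_group_index.
scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", 1e-7, 1e-5, cycle_size=100, param_group_index=0)
scheduler_2 = LinearCyclicalScheduler(optimizer, "lr", 1e-5, 1e-3, cycle_size=100, param_group_index=1)
scheduler = ParamGroupScheduler([scheduler_1, scheduler_2], names=["lr (base)", "lr (head)"])

# Attach single scheduler to the trainer, as the docstring snippet above shows.
trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)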