FitLoop
1 parent 65b3dc4 commit eacece1
pytorch_lightning/loops/fit_loop.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import logging
-from contextlib import suppress
 from typing import Any, Dict, Optional
 
 from pytorch_lightning.loops import Loop
@@ -181,8 +180,7 @@ def on_advance_start(self) -> None:
             self.trainer.train_dataloader.load_state_dict(self._dataloader_state_dict)
             self._dataloader_state_dict = {}
 
-        # TODO: specify the possible exception
-        with suppress(Exception):
+        if callable(getattr(self.trainer.train_dataloader.sampler, "set_epoch", None)):
             # set seed for distributed sampler (enables shuffling for each epoch)
             self.trainer.train_dataloader.sampler.set_epoch(self.current_epoch)
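The change replaces a blanket with suppress(Exception): around the set_epoch call with an explicit capability check: the sampler is only reseeded when it actually exposes a callable set_epoch (as torch.utils.data.DistributedSampler does), so unrelated exceptions are no longer silently swallowed. Below is a minimal standalone sketch of that guard outside of Lightning; ToyDataset and set_sampler_epoch are hypothetical names introduced for illustration, not part of the Lightning API.

import torch
from torch.utils.data import DataLoader, Dataset, DistributedSampler


class ToyDataset(Dataset):
    """Tiny in-memory dataset, only used to build a DataLoader for the example."""

    def __init__(self, n: int = 8) -> None:
        self.data = torch.arange(n)

    def __len__(self) -> int:
        return len(self.data)

    def __getitem__(self, idx: int) -> torch.Tensor:
        return self.data[idx]


def set_sampler_epoch(dataloader: DataLoader, epoch: int) -> None:
    # Same guard as in the diff: only samplers that expose a callable
    # `set_epoch` (e.g. DistributedSampler) are reseeded; plain samplers
    # such as SequentialSampler are skipped without raising.
    sampler = dataloader.sampler
    if callable(getattr(sampler, "set_epoch", None)):
        sampler.set_epoch(epoch)


if __name__ == "__main__":
    # Non-distributed loader: the guard is a no-op here.
    plain_loader = DataLoader(ToyDataset(), batch_size=2)
    set_sampler_epoch(plain_loader, epoch=0)

    # With a DistributedSampler the call goes through and reshuffles each epoch.
    # num_replicas/rank are passed explicitly so the sketch runs without an
    # initialized process group.
    dist_sampler = DistributedSampler(ToyDataset(), num_replicas=1, rank=0, shuffle=True)
    dist_loader = DataLoader(ToyDataset(), batch_size=2, sampler=dist_sampler)
    for epoch in range(2):
        set_sampler_epoch(dist_loader, epoch)
        _ = list(dist_loader)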