Skip to content

Commit 5a13341

Browse files
authored
[BugFix] Resolve deprecation warning caused by `logger.warn` (use `logger.warning`) (#3024)
1 parent c275988 commit 5a13341

File tree

1 file changed

+9
-7
lines changed

1 file changed

+9
-7
lines changed

torchrl/objectives/ppo.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
from tensordict.utils import NestedKey
3030
from torch import distributions as d
3131

32-
from torchrl._utils import _standardize, logger as torchrl_logger
32+
from torchrl._utils import _standardize, logger as torchrl_logger, VERBOSE
3333
from torchrl.objectives.common import LossModule
3434
from torchrl.objectives.utils import (
3535
_cache_values,
@@ -564,14 +564,16 @@ def _get_entropy(
564564
entropy = dist.entropy()
565565
if not entropy.isfinite().all():
566566
del entropy
567-
torchrl_logger.info(
568-
"Entropy is not finite. Using Monte Carlo sampling."
569-
)
567+
if VERBOSE:
568+
torchrl_logger.info(
569+
"Entropy is not finite. Using Monte Carlo sampling."
570+
)
570571
raise NotImplementedError
571572
except NotImplementedError:
572-
torchrl_logger.warn(
573-
f"Entropy not implemented for {type(dist)} or is not finite. Using Monte Carlo sampling."
574-
)
573+
if VERBOSE:
574+
torchrl_logger.warning(
575+
f"Entropy not implemented for {type(dist)} or is not finite. Using Monte Carlo sampling."
576+
)
575577
if getattr(dist, "has_rsample", False):
576578
x = dist.rsample((self.samples_mc_entropy,))
577579
else:

0 commit comments

Comments
 (0)