@@ -151,7 +151,7 @@ def categorical_cross_entropy(
 def categorical_focal_cross_entropy(
     y_true: np.ndarray,
     y_pred: np.ndarray,
-    alpha: np.ndarray = None,
+    alpha: np.ndarray | None = None,
     gamma: float = 2.0,
     epsilon: float = 1e-15,
 ) -> float:
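A None default never satisfies a bare np.ndarray annotation, so the PEP 604 union np.ndarray | None is the type-correct way to mark the class-weight argument optional. A minimal sketch of the fixed signature in use; the body here is hypothetical and stands in for the real implementation in the patched file (the uniform-alpha fallback and the epsilon clipping are assumptions, not the repo's code):

import numpy as np

def categorical_focal_cross_entropy(
    y_true: np.ndarray,
    y_pred: np.ndarray,
    alpha: np.ndarray | None = None,
    gamma: float = 2.0,
    epsilon: float = 1e-15,
) -> float:
    # Hypothetical body for illustration only; the real one lives in the patched file.
    if alpha is None:
        alpha = np.ones(y_true.shape[-1])  # assumed uniform class weights
    y_pred = np.clip(y_pred, epsilon, 1 - epsilon)  # avoid log(0)
    return float(-np.sum(alpha * y_true * (1 - y_pred) ** gamma * np.log(y_pred)))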
@@ -648,7 +648,11 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
     >>> true_labels = np.array([0.2, 0.3, 0.5])
     >>> predicted_probs = np.array([0.3, 0.3, 0.4])
     >>> float(kullback_leibler_divergence(true_labels, predicted_probs))
-    0.030478754035472025
+    0.0304787540354719
+    >>> true_labels = np.array([0, 0.5, 0.5])
+    >>> predicted_probs = np.array([0.3, 0.3, 0.4])
+    >>> float(kullback_leibler_divergence(true_labels, predicted_probs))
+    0.3669845875400667
     >>> true_labels = np.array([0.2, 0.3, 0.5])
     >>> predicted_probs = np.array([0.3, 0.3, 0.4, 0.5])
     >>> kullback_leibler_divergence(true_labels, predicted_probs)
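The first expected value changes only in its final digits because the beta = 1e-15 smoothing added below perturbs both distributions slightly; the new doctest covers a zero entry in true_labels, which the unsmoothed code could not handle. A quick hand check of the new value (the smoothed zero term contributes only about -3.4e-14, so it is negligible): 0.5*ln(0.5/0.3) + 0.5*ln(0.5/0.4) ≈ 0.255413 + 0.111572 ≈ 0.366985, consistent with 0.3669845875400667.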
@@ -658,7 +662,9 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
     """
     if len(y_true) != len(y_pred):
         raise ValueError("Input arrays must have the same length.")
-
+    beta = 1e-15
+    y_true = y_true + beta
+    y_pred = y_pred + beta
     kl_loss = y_true * np.log(y_true / y_pred)
     return np.sum(kl_loss)
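The smoothing term is what makes the zero-entry doctest above pass: without it, a zero in y_true drives y_true * np.log(y_true / y_pred) through 0 * log(0) = 0 * (-inf), which NumPy evaluates to nan and propagates through the sum. A small standalone sketch of the failure mode and the fix, mirroring the patched expression:

import numpy as np

p = np.array([0.0, 0.5, 0.5])
q = np.array([0.3, 0.3, 0.4])

# Unsmoothed: log(0) = -inf, and 0 * -inf = nan, so the whole sum is nan.
print(np.sum(p * np.log(p / q)))  # nan (with a divide-by-zero warning)

# Smoothed as in the patch: the zero term becomes negligibly small.
beta = 1e-15
print(np.sum((p + beta) * np.log((p + beta) / (q + beta))))  # ~0.3669845875400667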