Skip to content

Commit 9367b3b

Browse files
author
evan.zhang5
committed
fix: issue #12233 — added a small constant beta to the numerator and denominator, and added a test case
1 parent 03a4251 commit 9367b3b

File tree

1 file changed

+9
-3
lines changed

1 file changed

+9
-3
lines changed

machine_learning/loss_functions.py

+9-3
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ def categorical_cross_entropy(
151151
def categorical_focal_cross_entropy(
152152
y_true: np.ndarray,
153153
y_pred: np.ndarray,
154-
alpha: np.ndarray = None,
154+
alpha: np.ndarray | None = None,
155155
gamma: float = 2.0,
156156
epsilon: float = 1e-15,
157157
) -> float:
@@ -648,7 +648,11 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
648648
>>> true_labels = np.array([0.2, 0.3, 0.5])
649649
>>> predicted_probs = np.array([0.3, 0.3, 0.4])
650650
>>> float(kullback_leibler_divergence(true_labels, predicted_probs))
651-
0.030478754035472025
651+
0.0304787540354719
652+
>>> true_labels = np.array([0, 0.5, 0.5])
653+
>>> predicted_probs = np.array([0.3, 0.3, 0.4])
654+
>>> float(kullback_leibler_divergence(true_labels, predicted_probs))
655+
0.3669845875400667
652656
>>> true_labels = np.array([0.2, 0.3, 0.5])
653657
>>> predicted_probs = np.array([0.3, 0.3, 0.4, 0.5])
654658
>>> kullback_leibler_divergence(true_labels, predicted_probs)
@@ -658,7 +662,9 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
658662
"""
659663
if len(y_true) != len(y_pred):
660664
raise ValueError("Input arrays must have the same length.")
661-
665+
beta = 1e-15
666+
y_true = y_true + beta
667+
y_pred = y_pred + beta
662668
kl_loss = y_true * np.log(y_true / y_pred)
663669
return np.sum(kl_loss)
664670

0 commit comments

Comments
 (0)