
Commit 6c45739

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 1f98734 commit 6c45739

File tree

1 file changed: +7 −3 lines

machine_learning/loss_functions.py

@@ -659,9 +659,13 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
     if len(y_true) != len(y_pred):
         raise ValueError("Input arrays must have the same length.")
 
-    kl_loss = np.concatenate((y_true[None, :], y_pred[None, :])) # true probs in first row and predicted in second
-    kl_loss = kl_loss[:, np.any(kl_loss == 0, axis=0) == False] # Filtered zero probabilities from both probability arrays
-    kl_loss = kl_loss[0] * np.log(kl_loss[0] / kl_loss[1]) # Calculating safely now
+    kl_loss = np.concatenate(
+        (y_true[None, :], y_pred[None, :])
+    ) # true probs in first row and predicted in second
+    kl_loss = kl_loss[
+        :, np.any(kl_loss == 0, axis=0) == False
+    ] # Filtered zero probabilities from both probability arrays
+    kl_loss = kl_loss[0] * np.log(kl_loss[0] / kl_loss[1]) # Calculating safely now
     return np.sum(kl_loss)
 
 
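For context on what the touched lines compute: the function evaluates KL(P ∥ Q) = Σ p · log(p / q), after dropping every position where either distribution has a zero probability, so the log and the division stay defined. A minimal standalone sketch of that same filtering step, with made-up input arrays purely for illustration:

import numpy as np

# Illustrative inputs only (not from the commit); the last column is dropped
# because y_true has a zero there.
y_true = np.array([0.5, 0.3, 0.2, 0.0])
y_pred = np.array([0.4, 0.4, 0.1, 0.1])

stacked = np.concatenate((y_true[None, :], y_pred[None, :]))  # row 0: true probs, row 1: predicted
kept = stacked[:, ~np.any(stacked == 0, axis=0)]  # keep only columns with no zero in either row
kl = np.sum(kept[0] * np.log(kept[0] / kept[1]))  # sum of p * log(p / q) over the kept columns
print(kl)

The sketch uses `~` to invert the boolean mask instead of the `== False` comparison seen in the diff; for a boolean array the two produce the same mask.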