
Commit 153cb49

changed line continuation method

1 parent 45932a0

File tree: 1 file changed (+28 -28 lines changed)

Diff for: machine_learning/loss_functions/perplexity_loss.py (+28 -28)
@@ -1,9 +1,9 @@
 """
-Compute the Perplexity which useful in predicting language model
+Compute the Perplexity which useful in predicting language model
 accuracy in Natural Language Processing (NLP.)
 Perplexity is measure of how certain the model in its predictions.

-Formula :
+Formula :

 Perplexity Loss = exp(-1/N (Σ ln(p(x)))

@@ -29,45 +29,45 @@ def perplexity_loss(y_true: np.ndarray, y_pred: np.ndarray) -> float:
         Perplexity loss between y_true and y_pred.

     >>> y_true = np.array([[1, 4], [2, 3]])
-    >>> y_pred = np.array( \
-        [[[0.28, 0.19, 0.21 , 0.15, 0.15], \
-        [0.24, 0.19, 0.09, 0.18, 0.27]], \
-        [[0.03, 0.26, 0.21, 0.18, 0.30], \
-        [0.28, 0.10, 0.33, 0.15, 0.12]]]\
-        )
+    >>> y_pred = np.array(
+    ...     [[[0.28, 0.19, 0.21 , 0.15, 0.15],
+    ...     [0.24, 0.19, 0.09, 0.18, 0.27]],
+    ...     [[0.03, 0.26, 0.21, 0.18, 0.30],
+    ...     [0.28, 0.10, 0.33, 0.15, 0.12]]]
+    ... )
     >>> perplexity_loss(y_true, y_pred)
     5.024732177979022
     >>> y_true = np.array([[1, 4], [2, 3]])
-    >>> y_pred = np.array( \
-        [[[0.28, 0.19, 0.21 , 0.15, 0.15], \
-        [0.24, 0.19, 0.09, 0.18, 0.27], \
-        [0.30, 0.10, 0.20, 0.15, 0.25]], \
-        [[0.03, 0.26, 0.21, 0.18, 0.30], \
-        [0.28, 0.10, 0.33, 0.15, 0.12], \
-        [0.30, 0.10, 0.20, 0.15, 0.25]],] \
-        )
+    >>> y_pred = np.array(
+    ...     [[[0.28, 0.19, 0.21 , 0.15, 0.15],
+    ...     [0.24, 0.19, 0.09, 0.18, 0.27],
+    ...     [0.30, 0.10, 0.20, 0.15, 0.25]],
+    ...     [[0.03, 0.26, 0.21, 0.18, 0.30],
+    ...     [0.28, 0.10, 0.33, 0.15, 0.12],
+    ...     [0.30, 0.10, 0.20, 0.15, 0.25]],]
+    ... )
     >>> perplexity_loss(y_true, y_pred)
     Traceback (most recent call last):
     ...
     ValueError: Sentence length of y_true and y_pred must be equal.
     >>> y_true = np.array([[1, 4], [2, 11]])
-    >>> y_pred = np.array( \
-        [[[0.28, 0.19, 0.21 , 0.15, 0.15], \
-        [0.24, 0.19, 0.09, 0.18, 0.27]], \
-        [[0.03, 0.26, 0.21, 0.18, 0.30], \
-        [0.28, 0.10, 0.33, 0.15, 0.12]]]\
-        )
+    >>> y_pred = np.array(
+    ...     [[[0.28, 0.19, 0.21 , 0.15, 0.15],
+    ...     [0.24, 0.19, 0.09, 0.18, 0.27]],
+    ...     [[0.03, 0.26, 0.21, 0.18, 0.30],
+    ...     [0.28, 0.10, 0.33, 0.15, 0.12]]]
+    ... )
     >>> perplexity_loss(y_true, y_pred)
     Traceback (most recent call last):
     ...
     ValueError: Label value must not be greater than vocabulary size.
     >>> y_true = np.array([[1, 4]])
-    >>> y_pred = np.array( \
-        [[[0.28, 0.19, 0.21 , 0.15, 0.15], \
-        [0.24, 0.19, 0.09, 0.18, 0.27]], \
-        [[0.03, 0.26, 0.21, 0.18, 0.30], \
-        [0.28, 0.10, 0.33, 0.15, 0.12]]]\
-        )
+    >>> y_pred = np.array(
+    ...     [[[0.28, 0.19, 0.21 , 0.15, 0.15],
+    ...     [0.24, 0.19, 0.09, 0.18, 0.27]],
+    ...     [[0.03, 0.26, 0.21, 0.18, 0.30],
+    ...     [0.28, 0.10, 0.33, 0.15, 0.12]]]
+    ... )
     >>> perplexity_loss(y_true, y_pred)
     Traceback (most recent call last):
     ...
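For reference on what the doctests above exercise (this sketch is not part of the commit or of perplexity_loss.py): a minimal, self-contained example assuming the docstring formula exp(-1/N (Σ ln(p(x))) is evaluated per sentence over the probabilities assigned to the true tokens and then averaged over the batch. The name perplexity_sketch and the indexing details are illustrative assumptions, not the module's actual implementation.

import numpy as np

def perplexity_sketch(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    # Illustrative sketch only -- not the code in perplexity_loss.py.
    # y_true: (batch, sentence_len) integer token labels
    # y_pred: (batch, sentence_len, vocab_size) predicted probabilities
    batch_idx = np.arange(y_true.shape[0])[:, None]   # shape (batch, 1)
    pos_idx = np.arange(y_true.shape[1])[None, :]     # shape (1, sentence_len)
    true_probs = y_pred[batch_idx, pos_idx, y_true]   # probability of each true token
    # exp(-1/N * sum(ln p)) per sentence, then the mean over the batch
    return float(np.mean(np.exp(-np.mean(np.log(true_probs), axis=1))))

y_true = np.array([[1, 4], [2, 3]])
y_pred = np.array(
    [[[0.28, 0.19, 0.21, 0.15, 0.15],
      [0.24, 0.19, 0.09, 0.18, 0.27]],
     [[0.03, 0.26, 0.21, 0.18, 0.30],
      [0.28, 0.10, 0.33, 0.15, 0.12]]]
)
print(perplexity_sketch(y_true, y_pred))  # approx. 5.0247, matching the first doctest

Under these assumptions, switching from backslash continuation to the "..." secondary prompt does not change what the doctest runner evaluates; it only changes how the multi-line np.array(...) call is spelled inside the docstring.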
