forked from TheAlgorithms/Python
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathsigmoid_like.py
56 lines (44 loc) · 1.58 KB
/
sigmoid_like.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import numpy as np
def sigmoid(vector: np.ndarray) -> np.ndarray:
    """
    Apply the standard logistic sigmoid, 1 / (1 + e^(-x)), elementwise.

    Args:
        vector: (np.ndarray): The input array.

    Returns:
        np.ndarray: The result of the sigmoid activation applied to the input array.

    >>> np.linalg.norm(np.array([0.5, 0.66666667, 0.83333333]) \
    - sigmoid(vector=np.array([0, np.log(2), np.log(5)]))) < 10**(-5)
    True
    """
    # Compute e^(-x) once, then form the logistic denominator.
    exp_of_negated = np.exp(-1 * vector)
    return 1 / (1 + exp_of_negated)
def swish(vector: np.ndarray, beta: float = 1.0) -> np.ndarray:
    """
    Swish activation: https://arxiv.org/abs/1710.05941v2

    Computes x * sigmoid(beta * x) elementwise. With the default
    beta=1.0 this reduces to the SiLU activation; the default keeps
    existing callers (who pass beta explicitly) unaffected.

    Args:
        vector: (np.ndarray): The input array.
        beta: (float): Slope of the inner sigmoid; defaults to 1.0.

    Returns:
        np.ndarray: The result of the swish activation applied to the input array.

    >>> np.linalg.norm(np.array([0.5, 1., 1.5]) \
    - swish(np.array([1, 2, 3]), 0)) < 10**(-5)
    True
    >>> np.linalg.norm(np.array([0, 0.66666667, 1.6]) \
    - swish(np.array([0, 1, 2]), np.log(2))) < 10**(-5)
    True
    """
    return vector / (1 + np.exp(-beta * vector))
def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray:
    """
    SiLU activation: https://arxiv.org/abs/1606.08415

    Computes x * sigmoid(x), written as x / (1 + e^(-x)), elementwise.

    Args:
        vector: (np.ndarray): The input array.

    Returns:
        np.ndarray: The result of the sigmoid linear unit applied to the input array.

    >>> np.linalg.norm(np.array([0, 0.7310585, 0.462098]) \
    - sigmoid_linear_unit(np.array([0, 1, np.log(2)]))) < 10**(-5)
    True
    """
    # Same arithmetic as x * sigmoid(x), but with a single division.
    denominator = 1 + np.exp(-1 * vector)
    return vector / denominator
if __name__ == "__main__":
    # Run the embedded doctests when this file is executed as a script.
    from doctest import testmod

    testmod()