@@ -19,6 +19,15 @@ def train_network(
19
19
20
20
Explanation here (Available just in Spanish):
21
21
https://drive.google.com/file/d/1QTEbRVgevfK8QJ30tWcEbaNbBaKnvGWv/view?usp=sharing
22
+
23
+ >>> import numpy as np
24
+ >>> x_train = np.array([[0.1, 0.2], [0.4, 0.6]])
25
+ >>> y_train = np.array([[1], [0]])
26
+ >>> neurons = 2
27
+ >>> epochs = 10
28
+ >>> result = train_network(neurons, x_train, y_train, epochs)
29
+ >>> all(part is not None for part in result)
30
+ True
22
31
"""
23
32
mu = 0.2
24
33
lambda_ = 1e-4
def relu(input_: np.ndarray) -> np.ndarray:
    """
    ReLU activation function.

    Used for the hidden layer because it is less susceptible to the
    vanishing-gradient problem than sigmoid/tanh.

    :param input_: array of pre-activation values
    :return: array of the same shape with negatives replaced by 0

    >>> relu(np.array([[0, -1, 2, 3, 0], [0, -1, -2, -3, 5]]))
    array([[0, 0, 2, 3, 0],
           [0, 0, 0, 0, 5]])
    """
    # Element-wise max(x, 0); preserves the input's dtype and shape.
    # Annotation fixed to np.ndarray: np.array is a factory function,
    # not a type, so it is incorrect as a type hint.
    return np.maximum(input_, 0)
83
94
84
95
85
96
def d_relu(input_: np.ndarray) -> np.ndarray:
    """
    Derivative of the ReLU activation function.

    NOTE: this implementation defines the derivative at 0 as 1 (the
    doctest below pins that convention).  The array is modified in
    place and also returned, matching the original calling convention.

    :param input_: array of pre-activation values (overwritten in place)
    :return: the same array, now holding 1 where input >= 0 else 0

    >>> d_relu(np.array([[0, -1, 2, 3, 0], [0, -1, -2, -3, 5]]))
    array([[1, 0, 1, 1, 1],
           [1, 0, 0, 0, 1]])
    """
    # Vectorized replacement for the element-by-element Python loop:
    # one C-level pass instead of O(rows * cols) interpreted iterations.
    # Slice assignment keeps the mutation in place and preserves dtype.
    input_[:] = np.where(input_ >= 0, 1, 0)
    return input_
95
110
96
111
97
112
def sigmoid(input_: float) -> float:
    """
    Logistic sigmoid activation function, used at the output layer.

    Maps any real input into the open interval (0, 1).

    >>> sigmoid(4)
    0.9820137900379085
    """
    # 1 / (1 + e^(-x)); the exponential is computed once and named
    # for readability.
    exp_term = np.exp(-input_)
    return 1 / (1 + exp_term)
103
121
104
122
105
123
def d_sigmoid(input_: float) -> float:
    """
    Derivative of the sigmoid activation function.

    Uses the identity d/dx sigmoid(x) = sigmoid(x)^2 * e^(-x),
    which is algebraically equal to sigmoid(x) * (1 - sigmoid(x)).

    >>> d_sigmoid(4)
    0.017662706213291114
    """
    squared = sigmoid(input_) ** 2
    return squared * np.exp(-input_)
110
131
@@ -119,7 +140,7 @@ def main() -> None:
119
140
120
141
Before train the neural network, the data is normalized to [0 1] interval
121
142
122
- The function trainNetwork () returns the weight and bias matrix to apply the
143
+ The function train_network () returns the weight and bias matrix to apply the
123
144
transfer function to predict the output
124
145
"""
125
146
@@ -137,7 +158,8 @@ def main() -> None:
137
158
y_train = target [0 :train ]
138
159
y_test = target [train :]
139
160
140
- epochs = 50
161
+ # play with epochs and neuron numbers
162
+ epochs = 5
141
163
neurons = 5
142
164
w_co , bias_co , w_cs , bias_cs , error = train_network (
143
165
neurons , x_train , y_train , epochs
@@ -152,4 +174,7 @@ def main() -> None:
152
174
153
175
154
176
if __name__ == "__main__" :
177
+ import doctest
178
+
179
+ doctest .testmod ()
155
180
main ()
0 commit comments