Skip to content

Commit 616faac

Browse files
Merge pull request #106 from frmatias/master
Neural Network - Perceptron
2 parents f5fc930 + 6ee6f12 commit 616faac

File tree

4 files changed

+165
-7
lines changed

4 files changed

+165
-7
lines changed

Diff for: Graphs/Breadth_First_Search.py

+35
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,41 @@ def __init__(self, nodes):
77

88

99
def show(self):
    """Print the adjacency matrix, one row of cells per output line."""
    for row in self.graph:
        # Build "c0 c1 c2 " then print with one extra trailing space,
        # matching the original cell-by-cell output exactly.
        rendered = ''.join('%s ' % cell for cell in row)
        print(rendered + ' ')
15+
def bfs(self, v):
    """Breadth-first traversal of the adjacency matrix from 1-based
    start vertex ``v``, printing '<n> visited' for each vertex reached.

    Fixes: uses collections.deque instead of list.pop(0) (which is O(n)
    per dequeue) and idiomatic boolean tests; visit order is unchanged.
    """
    from collections import deque  # local import: file-level imports not in view

    visited = [False] * self.vertex
    visited[v - 1] = True
    print('%d visited' % (v))

    queue = deque([v - 1])
    while queue:
        current = queue.popleft()  # original read queue[0] then pop(0) — same order
        for u in range(self.vertex):
            if self.graph[current][u] == 1 and not visited[u]:
                visited[u] = True
                queue.append(u)
                print('%d visited' % (u + 1))
31+
32+
# Build a sample 10-vertex tree-shaped graph and run BFS from vertex 4.
# FIX: the committed file contained a stray merge-conflict marker
# ("=======") immediately after this driver — a SyntaxError — removed here.
g = Graph(10)

g.add_edge(1,2)
g.add_edge(1,3)
g.add_edge(1,4)
g.add_edge(2,5)
g.add_edge(3,6)
g.add_edge(3,7)
g.add_edge(4,8)
g.add_edge(5,9)
g.add_edge(6,10)
g.bfs(4)
1045
print self.graph
1146

1247
def add_edge(self, i, j):

Diff for: Graphs/Graph_list.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,14 @@ def show(self):
1515

1616

1717

18-
# Demo: build a 100-vertex adjacency-list graph, wire five edges, print it.
g = Graph(100)

g.add_edge(1,3)
g.add_edge(2,3)
g.add_edge(3,4)
g.add_edge(3,5)
g.add_edge(4,5)

g.show()
2728

Diff for: Graphs/Graph_matrix.py

+5-6
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,12 @@ def show(self):
1818

1919

2020

21-
g = Graph(5)
21+
# Demo: build a 100-vertex adjacency-matrix graph, wire five edges, print it.
g = Graph(100)

g.add_edge(1,4)
g.add_edge(4,2)
g.add_edge(4,5)
g.add_edge(2,5)
g.add_edge(5,3)
g.show()
3029

Diff for: Neural_Network/perceptron.py

+123
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
'''
Perceptron
w = w + N * (d(k) - y) * x(k)

A perceptron network applied to oil analysis: three measured parameters
representing chemical characteristics let us classify each oil sample
into one of two classes, p1 or p2.
p1 = -1
p2 = 1
'''
12+
13+
import random
14+
15+
16+
class Perceptron:
    """Single-layer perceptron classifier with a sign activation.

    Weight update rule: w = w + learn_rate * (d(k) - y) * x(k).

    Parameters
    ----------
    sample : list[list[float]]
        Training feature vectors. NOTE: mutated in place by trannig(),
        which prepends the bias input to every vector (as the original did).
    exit : list[int]
        Desired output (+1 or -1) for each training sample.
        (Name kept for callers although it shadows the builtin.)
    learn_rate : float
        Step size for weight updates.
    epoch_number : int
        Maximum number of training epochs. FIX: previously accepted but
        never used, so training looped forever on non-separable data;
        it now bounds the training loop.
    bias : float
        Bias input value prepended to every sample.
    """

    def __init__(self, sample, exit, learn_rate=0.01, epoch_number=1000, bias=-1):
        self.sample = sample
        self.exit = exit
        self.learn_rate = learn_rate
        self.epoch_number = epoch_number
        self.bias = bias
        self.number_sample = len(sample)
        self.col_sample = len(sample[0])
        self.weight = []

    def trannig(self):
        """Train until an epoch passes with no misclassification, or the
        epoch_number budget is exhausted. (Method name kept as-is for
        existing callers.) Prints the epoch count when training stops."""
        # Prepend the bias input to every sample (in place, as before).
        for sample in self.sample:
            sample.insert(0, self.bias)

        # Random initial weights for the features, bias weight in front.
        for _ in range(self.col_sample):
            self.weight.append(random.random())
        self.weight.insert(0, self.bias)

        epoch_count = 0
        while True:
            erro = False
            for i in range(self.number_sample):
                u = 0
                for j in range(self.col_sample + 1):
                    u = u + self.weight[j] * self.sample[i][j]
                y = self.sign(u)
                if y != self.exit[i]:
                    # Misclassified: nudge every weight toward the target.
                    for j in range(self.col_sample + 1):
                        self.weight[j] = self.weight[j] + self.learn_rate * (self.exit[i] - y) * self.sample[i][j]
                    erro = True
            epoch_count = epoch_count + 1
            # Stop on a clean epoch, or when the epoch budget runs out
            # (the budget check was commented out in the original, which
            # could spin forever on non-separable data).
            if not erro or epoch_count >= self.epoch_number:
                print('\nEpoch:\n', epoch_count)
                print('------------------------\n')
                break

    def sort(self, sample):
        """Classify one feature vector and print the result (P1 for -1,
        P2 for +1). NOTE: mutates the caller's list by prepending the
        bias input, and the printed sample includes that bias."""
        sample.insert(0, self.bias)
        u = 0
        for i in range(self.col_sample + 1):
            u = u + self.weight[i] * sample[i]

        y = self.sign(u)

        if y == -1:
            print('Sample: ', sample)
            print('classification: P1')
        else:
            print('Sample: ', sample)
            print('classification: P2')

    def sign(self, u):
        """Sign activation: +1 if u >= 0 else -1."""
        return 1 if u >= 0 else -1
77+
78+
79+
# Oil-analysis training set: three measured chemical characteristics per sample.
samples = [
    [-0.6508, 0.1097, 4.0009],
    [-1.4492, 0.8896, 4.4005],
    [2.0850, 0.6876, 12.0710],
    [0.2626, 1.1476, 7.7985],
    [0.6418, 1.0234, 7.0427],
    [0.2569, 0.6730, 8.3265],
    [1.1155, 0.6043, 7.4446],
    [0.0914, 0.3399, 7.0677],
    [0.0121, 0.5256, 4.6316],
    [-0.0429, 0.4660, 5.4323],
    [0.4340, 0.6870, 8.2287],
    [0.2735, 1.0287, 7.1934],
    [0.4839, 0.4851, 7.4850],
    [0.4089, -0.1267, 5.5019],
    [1.4391, 0.1614, 8.5843],
    [-0.9115, -0.1973, 2.1962],
    [0.3654, 1.0475, 7.4858],
    [0.2144, 0.7515, 7.1699],
    [0.2013, 1.0014, 6.5489],
    [0.6483, 0.2183, 5.8991],
    [-0.1147, 0.2242, 7.2435],
    [-0.7970, 0.8795, 3.8762],
    [-1.0625, 0.6366, 2.4707],
    [0.5307, 0.1285, 5.6883],
    [-1.2200, 0.7777, 1.7252],
    [0.3957, 0.1076, 5.6623],
    [-0.1013, 0.5989, 7.1812],
    [2.4482, 0.9455, 11.2095],
    [2.0149, 0.6192, 10.9263],
    [0.2012, 0.2611, 5.4631]
]

# Desired class for each row of `samples` (-1 = P1, +1 = P2).
# FIX: renamed from `exit`, which shadowed the builtin; the constructor's
# keyword name is unchanged.
targets = [-1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1]

network = Perceptron(sample=samples, exit=targets, learn_rate=0.01, epoch_number=1000, bias=-1)

network.trannig()

# Interactive loop: read three feature values and classify each sample.
# (Runs until interrupted, as in the original.)
while True:
    sample = []
    for _ in range(3):
        sample.append(float(input('value: ')))  # was insert(i, ...) — append is the idiom
    network.sort(sample)

0 commit comments

Comments
 (0)