Commit 72857f4

Add files via upload
1 parent 7620dde commit 72857f4

File tree

3 files changed: +103 -0 lines

generate_data.py

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
#!/usr/bin/env python
#coding: utf-8

import numpy as np

def generateData(n):
    """
    Generates a 2D linearly separable dataset with 2n samples (n per class).
    The third element of each sample is the label.
    """
    # blue class: x in [-1, 0], y in [0, 1] (upper-left cluster)
    xb = (np.random.rand(n) * 2 - 1) / 2 - 0.5
    yb = (np.random.rand(n) * 2 - 1) / 2 + 0.5
    # red class: x in [0, 1], y in [-1, 0] (lower-right cluster)
    xr = (np.random.rand(n) * 2 - 1) / 2 + 0.5
    yr = (np.random.rand(n) * 2 - 1) / 2 - 0.5
    inputs = []
    inputs.extend([[xb[i], yb[i], 1] for i in xrange(n)])
    inputs.extend([[xr[i], yr[i], -1] for i in xrange(n)])
    return inputs
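
A quick usage sketch (not part of the commit) showing what the generator returns; the coordinates in the comments are illustrative only, since the values are random:

# Each sample is [x, y, label]: label +1 for the upper-left cluster and
# -1 for the lower-right one, so a line through the origin (e.g. y = x)
# separates the two classes by construction.
from generate_data import generateData

data = generateData(5)
print(len(data))  # 10 -> 2n samples, n per class
print(data[0])    # something like [-0.7, 0.3, 1]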

perceptron.py

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
#!/usr/bin/env python
#coding: utf-8

import random


class Perceptron(object):
    """A simple two-input perceptron with no bias term."""

    def __init__(self):
        super(Perceptron, self).__init__()
        self.w = [random.random() * 2 - 1 for _ in xrange(2)]  # weights in [-1, 1)
        self.learningRate = 0.1

    def response(self, x):
        """Perceptron output."""
        # y = x[0] * self.w[0] + x[1] * self.w[1]  # dot product between w and x
        # zip stops at the shorter list, so the label x[2] is ignored
        y = sum([i * j for i, j in zip(self.w, x)])  # more pythonic
        if y >= 0:
            return 1
        else:
            return -1

    def updateWeights(self, x, iterError):
        """
        Updates the weights; w at time t+1 is
        w(t+1) = w(t) + learningRate * (d - r) * x,
        where iterError is (d - r).
        """
        # self.w[0] += self.learningRate * iterError * x[0]
        # self.w[1] += self.learningRate * iterError * x[1]
        self.w = \
            [i + self.learningRate * iterError * j for i, j in zip(self.w, x)]

    def train(self, data):
        """
        Trains on every vector in data.
        Every vector in data must have three elements;
        the third element (x[2]) must be the label (desired output).
        """
        learned = False
        iteration = 0
        while not learned:
            globalError = 0.0
            for x in data:  # for each sample
                r = self.response(x)
                if x[2] != r:  # wrong response
                    iterError = x[2] - r  # desired response - actual response
                    self.updateWeights(x, iterError)
                    globalError += abs(iterError)
            iteration += 1
            if globalError == 0.0 or iteration >= 100:  # stop criteria
                print 'iterations: %s' % iteration
                learned = True  # stop learning
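
To make the update rule in updateWeights concrete, here is a small hand-worked step (a sketch, not part of the commit; the numbers are chosen only for illustration):

# Suppose w = [0.2, -0.4], learningRate = 0.1, and the sample is
# x = [-0.6, 0.8, 1] (desired label d = +1).
# Response: y = 0.2 * (-0.6) + (-0.4) * 0.8 = -0.44, so r = -1 and the
# sample is misclassified; iterError = d - r = 2.
# Update: w = [0.2 + 0.1 * 2 * (-0.6), -0.4 + 0.1 * 2 * 0.8] = [0.08, -0.24].
from perceptron import Perceptron

p = Perceptron()
p.w = [0.2, -0.4]                    # fix the weights for the example
p.updateWeights([-0.6, 0.8, 1], 2)   # iterError = 2, as computed above
print(p.w)                           # approximately [0.08, -0.24]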

test.py

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
#!/usr/bin/env python
#coding: utf-8

from matplotlib.pylab import *
# local modules
import generate_data
import perceptron

trainset = generate_data.generateData(80)  # training set generation
testset = generate_data.generateData(20)   # test set generation
p = perceptron.Perceptron()  # use a short name for the perceptron
p.train(trainset)

# Perceptron test
for x in testset:
    r = p.response(x)
    if r != x[2]:  # if the response is not correct
        print 'not hit.'
    if r == 1:
        plot(x[0], x[1], 'ob')
    else:
        plot(x[0], x[1], 'or')

# Plot of the separation line.
# The center of the line is the coordinate origin,
# so the length of the line is 2.
# The separation line is orthogonal to w.
n = norm(p.w)  # aka the length of the p.w vector
ww = array(p.w) / n  # a unit vector (p.w is a list, so convert it first)
ww1 = [ww[1], -ww[0]]
ww2 = [-ww[1], ww[0]]
plot([ww1[0], ww2[0]], [ww1[1], ww2[1]], '--k')
show()
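
A short note on the plotted segment (a sketch, not part of the commit): because the perceptron has no bias term, its decision boundary is the set of points x with w . x = 0, a line through the origin, and the endpoints built from ww are orthogonal to w, for example:

# For any weights [w0, w1], the endpoint [w1, -w0] satisfies
# w0 * w1 + w1 * (-w0) = 0, so it lies on the boundary.
# The weight values below are hypothetical.
w = [0.6, -0.8]
endpoint = [w[1], -w[0]]
print(w[0] * endpoint[0] + w[1] * endpoint[1])  # 0.0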
