"""
Author: Antonio Cali
Project: perceptron
---
Perceptron Class:
    3 important function:
    -activateFunction
    -fit
    -train

    activateFunction: defines the output of inputs (sum of weights)
    fit: defines how perceptron is able to receive inputs and generate an output
    train: defines how weights should change
----
"""
import random


class Perceptron:
    def __init__(self, n, lr):
        # n: number of inputs (including the bias input), lr: learning rate
        self.n = n
        self.lr = lr
        self.weights = [0.0] * n

    # A simple sign(x) activation function
    def activateFunction(self, n):
        if n >= 0:
            return 1
        else:
            return -1
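    # For example, activateFunction(0.5) returns 1 and activateFunction(-2) returns -1;
    # an input of exactly 0 is treated as the positive class.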
    # Computes the weighted sum weightX*pointX + weightY*pointY + weightBias*bias
    # and returns the sign of that sum
    def fit(self, point):
        sum_ = 0.0
        for w, c in zip(self.weights, point):
            sum_ += w * c
        return self.activateFunction(sum_)
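    # Worked example (illustrative values, not from the original):
    # with weights [0.5, -0.2, 0.1] and point [2, 3, 1],
    # the sum is 0.5*2 + (-0.2)*3 + 0.1*1 = 0.5, so fit returns 1.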

    # Trains on a single example, adjusting its own weights by the perceptron rule
    def train(self, inputs, desired):
        guess = self.fit(inputs)
        error = desired - guess
        for i, inp in enumerate(inputs):
            self.weights[i] += self.lr * error * inp
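    # Update rule: weights[i] <- weights[i] + lr * (desired - guess) * inputs[i].
    # Illustrative example (values assumed): if desired = 1, guess = -1 and lr = 0.1,
    # the error is 2, so each weight moves by 0.2 * inputs[i] toward the correct side.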

"""
---
Trainer Class:
    Simple point class that have a X and Y Coordinate.
    We pass also the function of the straight line that divide the plane:
    usefull to automatically calculate the expected output of classification
----
"""
class Trainer:
    def __init__(self, x, y, function):
        self.inputs = [x, y, 1]  # the third component is the bias input
        self.output = 1
        if y < function(x):
            self.output = -1
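# Labeling example (values assumed for illustration): with f(x) = 2*x + 1,
# Trainer(0, 5, f).output is 1 because 5 >= f(0) = 1, while
# Trainer(0, -3, f).output is -1 because -3 < f(0).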

# Define a main() function that trains the perceptron and reports its progress.
def main():
    # f: the straight line that divides the plane into two classes
    f = lambda x: x * 2 + 1
    """
    c: is the learning rate. (0.0,1.0].
    High value: faster learning but error rate will probably be higher
    Low Value: slower learning, if too low it could "stuck" the learning process
    """
    c = 0.1
    """
    p: Perceptron.
    Number 3 is fixed, it represents the (n-1)-dimensional case, for this example
    we're working on 2-dimension. The (+1) dimension stands for the bias value (fixed to 1)
    """
    p = Perceptron(3, c)
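    # The learned decision boundary is weights[0]*x + weights[1]*y + weights[2]*1 = 0,
    # which the training below pushes toward the line y = 2*x + 1 defined by f.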
    # n: number of points used to train (and test)
    n = 1000
    # d: report progress every d percent of the training rounds
    d = 0.1
    trainingInputs = []
    for _ in range(n):
        trainingInputs.append(Trainer(random.randint(-100, 100), random.randint(-100, 100), f))
    for i, t in enumerate(trainingInputs):
        p.train(t.inputs, t.output)
        if i % int((n/100)*d) == 0 or i == len(trainingInputs)-1:
            # Measure accuracy on the full training set after this round
            correctGuess = 0
            for point in trainingInputs:
                if point.output == p.fit(point.inputs):
                    correctGuess += 1
            print("Round %d / Correct guesses %d" % (i, correctGuess))
    print(p.weights)
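    # A minimal held-out check (an addition beyond the original script, since the
    # accuracy above is measured on the same points used for training): try a
    # fresh batch of random points. The batch size of 200 is an assumed value.
    testInputs = [Trainer(random.randint(-100, 100), random.randint(-100, 100), f)
                  for _ in range(200)]
    correctTest = sum(1 for t in testInputs if p.fit(t.inputs) == t.output)
    print("Held-out: %d / %d correct" % (correctTest, len(testInputs)))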


# Run the script only when executed directly
if __name__ == "__main__":
    main()