#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
from numpy.random import rand
import pylab as pl


def generateData(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is a list [x, y, label]: the first cluster (shifted
    up-left) is labelled -1, the second (shifted down-right) +1.
    """
    linear_offset = 0.6
    # Each cluster is a side-1 square centred at +/- the offset.
    blue_x = (rand(n) * 2 - 1) / 2 - linear_offset
    blue_y = (rand(n) * 2 - 1) / 2 + linear_offset
    red_x = (rand(n) * 2 - 1) / 2 + linear_offset
    red_y = (rand(n) * 2 - 1) / 2 - linear_offset
    samples = []
    for bx, by, rx, ry in zip(blue_x, blue_y, red_x, red_y):
        samples.append([bx, by, -1])
        samples.append([rx, ry, 1])
    return samples


def generateData2(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is a list [x, y, label]: the cluster around (-0.5, 0)
    is labelled -1, the cluster around (1.5, -0.5) is labelled +1.
    """
    def noise(m):
        # m uniform draws in [-0.5, 0.5]
        return (rand(m) * 2 - 1) / 2

    xb = noise(n) - 0.5
    yb = noise(n)
    xr = noise(n) + 1.5
    yr = noise(n) - 0.5
    samples = []
    for i in range(n):
        samples.append([xb[i], yb[i], -1])
        samples.append([xr[i], yr[i], 1])
    return samples


def generateData3(n):
    """
    Generate a 2D dataset that is NOT linearly separable.

    Negative samples (label -1) fall in the square of side 1 centred at
    the origin.  Positive samples (label +1) fall in a ring: inside the
    square of side 3 but outside the square of side 2.  Because the
    outer candidates are filtered, the result holds between n and 5n
    samples of the form [x, y, label].
    """
    # Inner cluster: square of side 1 centred at the origin.
    inner_x = (rand(n) * 2 - 1) / 2
    inner_y = (rand(n) * 2 - 1) / 2
    # Outer candidates: square of side 3 centred at the origin.
    outer_x = 3 * (rand(4 * n) * 2 - 1) / 2
    outer_y = 3 * (rand(4 * n) * 2 - 1) / 2

    samples = [[x, y, -1] for x, y in zip(inner_x, inner_y)]
    # Keep only the candidates outside the square of side 2, so the
    # positive class forms a ring around the negative one.
    samples.extend([x, y, 1]
                   for x, y in zip(outer_x, outer_y)
                   if abs(x) >= 1 or abs(y) >= 1)
    return samples


# Build the training data: 2 * training_set_size labelled points from
# the linearly separable generator generateData2.
training_set_size = 150
training_set = generateData2(training_set_size)
data = np.array(training_set)
# Split features (first two columns) from labels (last column, +/-1).
X = data[:, 0:2]
Y = data[:, -1]


def perceptron_nobias(X, Y, max_iter=None):
    """
    Train a perceptron without a bias term (Rosenblatt's rule).

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Training inputs (append a constant feature to emulate a bias).
    Y : ndarray of shape (n_samples,)
        Labels in {-1, +1}.
    max_iter : int or None, optional
        Maximum number of passes over the data.  The default, None,
        keeps the original behaviour of looping until every sample is
        classified correctly -- which never terminates if the data are
        not linearly separable.

    Returns
    -------
    ndarray of shape (n_features,)
        The learned weight vector.
    """
    w = np.zeros(X.shape[1])
    errors = 1  # force at least one pass over the data
    epoch = 0
    while errors != 0 and (max_iter is None or epoch < max_iter):
        errors = 0
        for xi, yi in zip(X, Y):
            # A margin of exactly 0 counts as a mistake, so the
            # all-zero initial vector always gets updated.
            if yi * np.dot(w, xi) <= 0:
                errors += 1
                w = w + yi * xi
        epoch += 1
    return w


def complete(sample):
    """Append a constant 1 to every row of *sample* (bias feature)."""
    bias_column_index = len(sample[0])
    return np.array(np.insert(sample, bias_column_index, [1], axis=1))


def plongement(sample_element):
    """
    Map a 2D point (x, y) into the degree-2 polynomial feature space:
    [1, x, y, x^2, x*y, y^2].
    """
    x, y = sample_element[0], sample_element[1]
    return [1, x, y, x * x, x * y, y * y]


def apply_plongement(sample):
    """Apply the quadratic embedding `plongement` to every row of *sample*."""
    return np.array([plongement(row) for row in sample])


# Embed the data in the quadratic feature space, where the clusters are
# separable by a hyperplane, and train the perceptron there.
X = apply_plongement(X)
w = perceptron_nobias(X, Y)
# BUGFIX: after the embedding, column 0 of X is the constant-1 feature,
# so plotting X[:, 0] put every point at x == 1.  The original (x, y)
# coordinates are columns 1 and 2 of the embedded matrix.
pl.scatter(X[:, 1], X[:, 2], c=Y, s=training_set_size)
pl.title(u"Perceptron - hyperplan")
pl.show()