Commit | Line | Data |
---|---|---|
ce56d6ab JB |
1 | #!/usr/bin/env python3 |
2 | ||
3 | # -*- coding: utf-8 -*- | |
4 | import numpy as np | |
5 | from numpy.random import rand | |
6 | import pylab as pl | |
7 | ||
8 | ||
def generateData(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is [x, y, label].  The label -1 class is drawn around
    (-0.6, +0.6) and the label +1 class around (+0.6, -0.6), each inside
    a square of side 1, so the two clouds are linearly separable.
    """
    offset = 0.6
    blue_x = (rand(n) * 2 - 1) / 2 - offset
    blue_y = (rand(n) * 2 - 1) / 2 + offset
    red_x = (rand(n) * 2 - 1) / 2 + offset
    red_y = (rand(n) * 2 - 1) / 2 - offset
    samples = []
    # interleave one blue (-1) and one red (+1) sample per iteration
    for bx, by, rx, ry in zip(blue_x, blue_y, red_x, red_y):
        samples.append([bx, by, -1])
        samples.append([rx, ry, 1])
    return samples
24 | ||
25 | ||
def generateData2(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is [x, y, label].  The label -1 class is centred on
    (0.5, 0) and the label +1 class on (1.5, -0.5), each inside a square
    of side 1.
    """
    blue_x = (rand(n) * 2 - 1) / 2 + 0.5
    blue_y = (rand(n) * 2 - 1) / 2
    red_x = (rand(n) * 2 - 1) / 2 + 1.5
    red_y = (rand(n) * 2 - 1) / 2 - 0.5
    samples = []
    # interleave one blue (-1) and one red (+1) sample per iteration
    for bx, by, rx, ry in zip(blue_x, blue_y, red_x, red_y):
        samples.append([bx, by, -1])
        samples.append([rx, ry, 1])
    return samples
40 | ||
41 | ||
def generateData3(n):
    """
    Generate a 2D dataset with about 2n samples that is NOT linearly
    separable: an inner square of -1 points surrounded by +1 points.

    Each sample is [x, y, label].  The n blue (-1) points fall in the
    square of side 1 centred at the origin; red (+1) candidates are drawn
    in the square of side 3 and only those outside the square of side 2
    are kept, leaving a margin between the two classes.
    """
    # blue points: square of side 1 centred at the origin
    blue_x = (rand(n) * 2 - 1) / 2
    blue_y = (rand(n) * 2 - 1) / 2
    # red candidates: square of side 3 centred at the origin
    red_x = 3 * (rand(4 * n) * 2 - 1) / 2
    red_y = 3 * (rand(4 * n) * 2 - 1) / 2
    samples = [[bx, by, -1] for bx, by in zip(blue_x, blue_y)]
    # keep only the candidates outside the square of side 2
    samples.extend([rx, ry, 1]
                   for rx, ry in zip(red_x, red_y)
                   if abs(rx) >= 1 or abs(ry) >= 1)
    return samples
62 | ||
63 | ||
# Build the training set: about 2 * training_set_size labelled 2D points
# from generateData3 (inner square of -1 points surrounded by +1 points,
# so the raw data is not linearly separable).
training_set_size = 150
training_set = generateData3(training_set_size)
data = np.array(training_set)
X = data[:, 0:2]  # point coordinates, shape (n_samples, 2)
Y = data[:, -1]   # labels, -1 or +1
69 | ||
70 | ||
def perceptron_nobias(X, Y):
    """
    Train a linear perceptron with no bias term.

    X: iterable of feature vectors, Y: matching -1/+1 labels.
    Loops over the data until every sample satisfies y * <w, x> > 0,
    printing the number of misclassifications after each pass, and
    returns the learned weight vector w.
    """
    w = np.zeros([len(X[0])])
    misclassified = 1  # force at least one pass over the data
    while misclassified != 0:
        misclassified = 0
        for sample, label in zip(X, Y):
            # samples on the wrong side (or on the hyperplane) trigger
            # the classic perceptron update
            if label * np.dot(w, sample) <= 0:
                misclassified += 1
                w = w + label * sample
        print(misclassified)
    return w
83 | ||
84 | ||
def complete(sample):
    """Return *sample* with a constant 1 appended to every row
    (homogeneous coordinates, i.e. an explicit bias feature)."""
    width = len(sample[0])
    # np.insert keeps the input dtype, unlike stacking with np.ones
    return np.array(np.insert(sample, width, [1], axis=1))
88 | ||
89 | ||
def plongement_phi(sample_element):
    """Degree-2 polynomial feature embedding of a 2D point.

    Maps (x, y) to [1, x, y, x^2, x*y, y^2]; its inner product matches
    the polynomial kernel k1.
    """
    x, y = sample_element[0], sample_element[1]
    return [1, x, y, x**2, x * y, y**2]
ce56d6ab JB |
93 | |
94 | ||
def apply_plongement(sample, p):
    """Apply the embedding *p* to every row of *sample* and stack the
    results into a new array."""
    return np.array([p(row) for row in sample])
101 | ||
102 | ||
4bab9ffb JB |
def f_from_k(coeffs, support_set, k, x):
    """Kernel-perceptron decision value at point *x*.

    support_set holds (vector, label) pairs; the score is
    sum_i coeffs[i] * label_i * k(vector_i, x).
    """
    return sum(c * sv[1] * k(sv[0], x) for c, sv in zip(coeffs, support_set))
108 | ||
109 | ||
def k1(X1, X2):
    """Polynomial kernel equal to <phi(X1), phi(X2)> for the degree-2
    embedding [1, x, y, x^2, x*y, y^2]."""
    a, b = X1[0], X1[1]
    c, d = X2[0], X2[1]
    # grouping mirrors the x**2 * y**2 form so float rounding is unchanged
    return 1 + a * c + b * d + (a * a) * (c * c) + a * b * c * d + (b * b) * (d * d)
df09eefa JB |
113 | |
114 | ||
def kg(x, y, sigma=10):
    """Gaussian (RBF) kernel between 2D points x and y with bandwidth
    sigma: exp(-||x - y||^2 / sigma^2)."""
    dx = x[0] - y[0]
    dy = x[1] - y[1]
    return np.exp(-(dx * dx + dy * dy) / sigma**2)
4bab9ffb JB |
117 | |
118 | ||
def perceptron_k(X, Y, k):
    """
    Train a kernel perceptron.

    X: iterable of feature vectors, Y: matching -1/+1 labels,
    k: kernel function k(u, v) -> float.
    Returns (coeffs, support_set): support_set is an object array of
    (vector, label) pairs and coeffs[i] counts how many updates used
    pair i.  Prints the misclassification count after each pass.
    """
    coeffs = []
    support_set = []

    def _find(x, y):
        # Index of (x, y) in support_set, or -1.  We cannot use `in` or
        # `list.index` here: `==` on numpy rows is element-wise, so tuple
        # comparison against an array either never matches or raises
        # "truth value of an array is ambiguous".
        for i, (sx, sy) in enumerate(support_set):
            if sy == y and np.array_equal(sx, x):
                return i
        return -1

    classification_error = 1  # force at least one pass over the data
    while classification_error != 0:
        classification_error = 0
        for x, y in zip(X, Y):
            if y * f_from_k(coeffs, support_set, k, x) <= 0:
                classification_error += 1
                idx = _find(x, y)
                if idx < 0:
                    support_set.append((x, y))
                    coeffs.append(1)
                else:
                    coeffs[idx] += 1
        print(classification_error)
    # dtype=object: the (vector, scalar) pairs are ragged, which newer
    # numpy refuses to stack implicitly
    return np.array(coeffs), np.array(support_set, dtype=object)
4bab9ffb JB |
136 | |
137 | ||
c60d868e JB |
def f(w, x, y):
    """Evaluate the quadratic decision function with weights *w* over the
    degree-2 monomials [1, x, y, x^2, x*y, y^2]."""
    linear = w[0] + w[1] * x + w[2] * y
    quadratic = w[3] * x**2 + w[4] * x * y + w[5] * y**2
    return linear + quadratic
f08c4a95 JB |
140 | |
141 | ||
c60d868e JB |
# Scatter the training points, coloured by their label.
pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
pl.title(u"Perceptron - hyperplan")

# Train the kernel perceptron with the polynomial kernel (the Gaussian
# kernel is kept as a commented-out alternative), then mark every grid
# point of [-1.5, 1.5]^2 where the decision value is close to zero.
coeffs, support_set = perceptron_k(X, Y, k1)
# coeffs, support_set = perceptron_k(X, Y, kg)
res = training_set_size
for i in range(res):
    for j in range(res):
        gx = -3 / 2 + 3 * i / res
        gy = -3 / 2 + 3 * j / res
        if abs(f_from_k(coeffs, support_set, k1, [gx, gy])) < 0.01:
            pl.plot(gx, gy, 'xr')

# Alternative: explicit degree-2 embedding + linear perceptron.
# X = apply_plongement(X, plongement_phi)
# w = perceptron_nobias(X, Y)
# for i in range(res):
#     for j in range(res):
#         if abs(f(w, -3 / 2 + 3 * i / res, -3 / 2 + 3 * j / res)) < 0.01:
#             pl.plot(-3 / 2 + 3 * i / res, -3 / 2 + 3 * j / res, 'xb')

pl.show()