Finish TP3 exo2.
[TP_AA.git] / TP3 / exo2 / tp3_exo2.py
CommitLineData
ce56d6ab
JB
1#!/usr/bin/env python3
2
3# -*- coding: utf-8 -*-
4import numpy as np
5from numpy.random import rand
6import pylab as pl
7
8
def generateData(n):
    """Build a linearly separable 2-D dataset of 2n labelled samples.

    Each sample is a list [x, y, label]; label is -1 for the first
    cluster (upper-left) and +1 for the second one (lower-right).
    """
    offset = 0.6
    neg_x = (rand(n) * 2 - 1) / 2 - offset
    neg_y = (rand(n) * 2 - 1) / 2 + offset
    pos_x = (rand(n) * 2 - 1) / 2 + offset
    pos_y = (rand(n) * 2 - 1) / 2 - offset
    samples = []
    # Interleave one negative and one positive sample per iteration.
    for nx, ny, px, py in zip(neg_x, neg_y, pos_x, pos_y):
        samples.append([nx, ny, -1])
        samples.append([px, py, 1])
    return samples
24
25
def generateData2(n):
    """Build a linearly separable 2-D dataset of 2n labelled samples.

    Same layout as generateData but with a diagonal separation:
    negatives around (0.5, 0), positives around (1.5, -0.5).
    """
    neg_x = (rand(n) * 2 - 1) / 2 + 0.5
    neg_y = (rand(n) * 2 - 1) / 2
    pos_x = (rand(n) * 2 - 1) / 2 + 1.5
    pos_y = (rand(n) * 2 - 1) / 2 - 0.5
    samples = []
    for i in range(n):
        # One sample of each class per iteration, negatives first.
        samples.extend(([neg_x[i], neg_y[i], -1], [pos_x[i], pos_y[i], 1]))
    return samples
40
41
def generateData3(n):
    """Build a NON-linearly separable dataset of about 2n samples.

    Negatives (label -1) fill the unit square centred at the origin;
    positives (label +1) fill the frame between the squares of side 2
    and side 3, obtained by rejection sampling from the side-3 square.
    The positive count is therefore only approximately n on average.
    """
    # Negative class: uniform in the square of side 1 centred at the origin.
    inner_x = (rand(n) * 2 - 1) / 2
    inner_y = (rand(n) * 2 - 1) / 2
    # Positive candidates: uniform in the square of side 3 centred at the origin.
    outer_x = 3 * (rand(4 * n) * 2 - 1) / 2
    outer_y = 3 * (rand(4 * n) * 2 - 1) / 2
    samples = [[x, y, -1] for x, y in zip(inner_x, inner_y)]
    # Keep only the candidates OUTSIDE the square of side 2 (the frame).
    samples += [[x, y, 1] for x, y in zip(outer_x, outer_y)
                if abs(x) >= 1 or abs(y) >= 1]
    return samples
62
63
# Build the training set and split it into coordinates (X) and labels (Y).
training_set_size = 150
training_set = generateData3(training_set_size)
data = np.array(training_set)
X = data[:, :2]   # 2-D point coordinates
Y = data[:, -1]   # class labels in {-1, +1}
69
70
def perceptron_nobias(X, Y):
    """Train a perceptron without a bias term.

    X: array of samples; Y: labels in {-1, +1}. Returns the weight
    vector w once every sample satisfies y * <w, x> > 0. The loop only
    terminates if the data are linearly separable through the origin.
    Prints the number of misclassified samples after each full pass.
    """
    w = np.zeros(len(X[0]))
    misclassified = 1  # force at least one pass over the data
    while misclassified != 0:
        misclassified = 0
        for sample, label in zip(X, Y):
            # Non-positive margin: sample is on the wrong side, update w.
            if label * np.dot(w, sample) <= 0:
                misclassified += 1
                w = w + label * sample
        print(misclassified)
    return w
83
84
def complete(sample):
    """Append a constant 1 column to every row of `sample`.

    Homogeneous coordinates: the extra 1 lets a bias be learned as one
    more weight by the no-bias perceptron.
    """
    last_column = len(sample[0])
    return np.array(np.insert(sample, last_column, [1], axis=1))
88
89
def plongement_phi(sample_element):
    """Degree-2 polynomial feature map.

    Maps a 2-D point (x, y) to [1, x, y, x^2, x*y, y^2], so a linear
    separator in feature space is a conic section in input space.
    """
    x, y = sample_element[0], sample_element[1]
    return [1, x, y, x ** 2, x * y, y ** 2]
ce56d6ab
JB
93
94
def apply_plongement(sample, p):
    """Apply the feature map `p` to every row of `sample`.

    Returns a new ndarray whose i-th row is p(sample[i]).
    """
    return np.array([p(row) for row in sample])
101
102
4bab9ffb
JB
def f_from_k(coeffs, support_set, k, x):
    """Kernelised decision function: sum_i coeffs[i] * y_i * k(x_i, x).

    support_set holds (sample, label) pairs; coeffs holds the per-pair
    mistake counts accumulated by the kernel perceptron.
    """
    return sum(alpha * pair[1] * k(pair[0], x)
               for alpha, pair in zip(coeffs, support_set))
108
109
def k1(X1, X2):
    """Degree-2 polynomial kernel.

    Equals the dot product of the plongement_phi feature vectors of X1
    and X2, computed without building the feature vectors explicitly.
    """
    a, b = X1[0], X1[1]
    c, d = X2[0], X2[1]
    return 1 + a * c + b * d + (a * c) ** 2 + a * b * c * d + (b * d) ** 2
df09eefa
JB
113
114
26fd2383
JB
def kg(x, y):
    """Gaussian (RBF) kernel exp(-||x - y||^2 / sigma^2) for 2-D points.

    Bandwidths tried during the lab: sigma in {20, 10} did not
    converge, sigma in {0.5, 0.2} overfit; sigma = 1 is kept.
    """
    sigma = 1
    squared_distance = (x[0] - y[0]) ** 2 + (x[1] - y[1]) ** 2
    return np.exp(-squared_distance / sigma ** 2)
4bab9ffb
JB
122
123
def perceptron_k(X, Y, k):
    """Kernel perceptron training loop.

    X: array of samples; Y: labels in {-1, +1}; k: kernel k(a, b).
    Returns (coeffs, support_set): the decision function is
    f(x) = sum_i coeffs[i] * support_set[i][1] * k(support_set[i][0], x).
    Loops until every training sample is correctly classified, so it
    only terminates when the data are separable in the kernel's
    feature space. Prints the mistake count after each full pass.
    """
    coeffs = []
    support_set = []

    def _decision(x):
        # f(x) = sum_i coeffs[i] * y_i * k(x_i, x); 0 on the empty set.
        return sum(c * s[1] * k(s[0], x) for c, s in zip(coeffs, support_set))

    def _index_of(x, y):
        # Index of (x, y) in the support set, or -1 when absent.
        # BUGFIX: the original tested `x not in support_set`, which
        # compares an ndarray against (sample, label) tuples — it never
        # matches and, with NumPy arrays, `in`/`list.index` raise
        # "truth value of an array is ambiguous". Use np.array_equal.
        for i, (s, label) in enumerate(support_set):
            if label == y and np.array_equal(s, x):
                return i
        return -1

    classification_error = 1  # force at least one pass over the data
    while classification_error != 0:
        classification_error = 0
        for x, y in zip(X, Y):
            if y * _decision(x) <= 0:
                idx = _index_of(x, y)
                if idx < 0:
                    support_set.append((x, y))
                    coeffs.append(1)
                else:
                    coeffs[idx] += 1
                classification_error += 1
        print(classification_error)
    # dtype=object: each pair mixes a vector and a scalar, and modern
    # NumPy refuses to build such a ragged array implicitly.
    return np.array(coeffs), np.array(support_set, dtype=object)
4bab9ffb
JB
141
142
c60d868e
JB
def f(w, x, y):
    """Evaluate the quadratic decision surface with weights w at (x, y).

    Weight ordering matches plongement_phi: [1, x, y, x^2, x*y, y^2].
    """
    terms = (w[0], w[1] * x, w[2] * y, w[3] * x * x, w[4] * x * y, w[5] * y * y)
    return sum(terms)
f08c4a95
JB
145
146
# Plot the training points coloured by label, then overlay the decision
# boundary of a kernel perceptron trained with the Gaussian kernel.
pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
pl.title(u"Perceptron - prolontaged hyperplan")

# k = k1
# coeffs, support_set = perceptron_k(X, Y, k)
k = kg
coeffs, support_set = perceptron_k(X, Y, k)

# Scan a res x res grid over [-1.5, 1.5]^2 and mark every point where
# the decision function is close to zero: this traces the boundary.
res = training_set_size
for x in range(res):
    for y in range(res):
        grid_point = [-3 / 2 + 3 * x / res, -3 / 2 + 3 * y / res]
        if abs(f_from_k(coeffs, support_set, k, grid_point)) < 0.01:
            pl.plot(grid_point[0], grid_point[1], 'xr')

# Alternative approach: explicit degree-2 embedding + linear perceptron.
# X = apply_plongement(X, plongement_phi)
# w = perceptron_nobias(X, Y)
# for x in range(res):
#     for y in range(res):
#         if abs(f(w, -3 / 2 + 3 * x / res, -3 / 2 + 3 * y / res)) < 0.01:
#             pl.plot(-3 / 2 + 3 * x / res, -3 / 2 + 3 * y / res, 'xb')

pl.show()