3 # -*- coding: utf-8 -*-
5 from numpy
.random
import rand
def generateData(n, linear_offset):
    """Generates a 2D linearly separable dataset with 2n samples.

    The third element of each sample is the label (-1 or +1).
    Blue points (label -1) are drawn around (-linear_offset, +linear_offset),
    red points (label +1) around (+linear_offset, -linear_offset), each
    coordinate uniform in a band of width 1 around its center.

    NOTE(review): the `def` header, accumulator init, loop header and return
    were lost in extraction; they are reconstructed here (function name
    inferred by analogy with generateData3 — confirm against the original).
    """
    # rand(n)*2 - 1 is uniform in [-1, 1]; /2 narrows to [-0.5, 0.5].
    xb = (rand(n) * 2 - 1) / 2 - linear_offset
    yb = (rand(n) * 2 - 1) / 2 + linear_offset
    xr = (rand(n) * 2 - 1) / 2 + linear_offset
    yr = (rand(n) * 2 - 1) / 2 - linear_offset
    inputs = []
    for i in range(n):
        inputs.append([xb[i], yb[i], -1])
        inputs.append([xr[i], yr[i], 1])
    return inputs
def generateData2(n):
    """Generates a 2D linearly separable dataset with 2n samples.

    The third element of each sample is the label (-1 or +1).
    Blue points (label -1) are centered on (-0.5, 0), red points (label +1)
    on (1.5, -0.5); the classes are separable but not symmetric about the
    origin, so a bias term is needed to separate them.

    NOTE(review): the `def` header, accumulator init, loop header and return
    were lost in extraction; they are reconstructed here (function name
    inferred by analogy with generateData3 — confirm against the original).
    """
    # Each coordinate is uniform in [-0.5, 0.5] before the shift.
    xb = (rand(n) * 2 - 1) / 2 - 0.5
    yb = (rand(n) * 2 - 1) / 2
    xr = (rand(n) * 2 - 1) / 2 + 1.5
    yr = (rand(n) * 2 - 1) / 2 - 0.5
    inputs = []
    for i in range(n):
        inputs.append([xb[i], yb[i], -1])
        inputs.append([xr[i], yr[i], 1])
    return inputs
def generateData3(n):
    """Generates a 2D dataset with about 2n samples.

    The third element of each sample is the label (-1 or +1).
    Blue points (label -1) fill a small square around the origin; red points
    (label +1) form a ring around them — NOT linearly separable in 2D, which
    is why the script below embeds the data with plongement_phi / a kernel.

    NOTE(review): the `def` header, accumulator init, first loop header and
    return were lost in extraction; they are reconstructed here (the name
    generateData3 is grounded by the call site below).
    """
    # (xb, yb) lies in the square centered at the origin with side 1.
    xb = (rand(n) * 2 - 1) / 2
    yb = (rand(n) * 2 - 1) / 2
    # (xr, yr) lies in the square centered at the origin with side 3.
    # 4n candidates are drawn because roughly half fall in the inner square
    # and are discarded below.
    xr = 3 * (rand(4 * n) * 2 - 1) / 2
    yr = 3 * (rand(4 * n) * 2 - 1) / 2
    inputs = []
    for i in range(n):
        inputs.append([xb[i], yb[i], -1])
    for i in range(4 * n):
        # Keep only the points outside the unit-half-width square centered
        # at the origin, so the red class surrounds the blue class.
        if abs(xr[i]) >= 1 or abs(yr[i]) >= 1:
            inputs.append([xr[i], yr[i], 1])
    return inputs
# Number of "blue" seed points; generateData3 returns roughly twice as many
# samples in total (blues plus the surviving ring of reds).
training_set_size = 150
training_set = generateData3(training_set_size)
# NOTE(review): `np` is presumably `import numpy as np` from a header line
# lost in this extract — confirm. Each row of `data` is [x, y, label].
data = np.array(training_set)
def perceptron_nobias(X, Y):
    """Rosenblatt perceptron without a bias term.

    Parameters:
        X: array of shape (m, d) — training samples (append a constant-1
           column beforehand if a bias is needed).
        Y: array of m labels in {-1, +1}.

    Returns the weight vector w such that sign(w . x) classifies X, found by
    looping over the data until a full pass makes no mistake. Only terminates
    if the data are linearly separable through the origin.

    NOTE(review): the mistake-update and return lines (orig. lines 80-84)
    were lost in extraction; reconstructed as the standard perceptron update
    w += Y[i] * X[i] — confirm against the original.
    """
    w = np.zeros([len(X[0])])
    # Go in the loop at least one time.
    classification_error = 1
    while not classification_error == 0:
        classification_error = 0
        for i in range(X.shape[0]):
            # A sample is misclassified when Y[i] * <w, X[i]> <= 0.
            if Y[i] * np.dot(w, X[i]) <= 0:
                classification_error += 1
                w = w + Y[i] * X[i]
    return w
def complete_sample(sample):
    """Append a constant-1 column to every row of `sample` (bias trick).

    This lets perceptron_nobias learn an affine separator: the last weight
    component plays the role of the bias.

    NOTE(review): the `def` line was lost in extraction — the function name
    is reconstructed; confirm against the original.
    """
    # Insert the value 1 at column index len(sample[0]), i.e. after the
    # last existing column of each row.
    new_sample = np.insert(sample, len(sample[0]), [1], axis=1)
    return np.array(new_sample)
def plongement_phi(sample_element):
    """Degree-2 polynomial feature map: (x, y) -> [1, x, y, x^2, xy, y^2].

    The ordinary dot product in this 6-dimensional feature space coincides
    with the polynomial kernel k1 below.
    """
    x = sample_element[0]
    y = sample_element[1]
    return [1, x, y, x * x, x * y, y * y]
def apply_plongement(sample, p):
    """Apply the embedding `p` to every row of `sample`.

    Parameters:
        sample: array of shape (m, d).
        p: callable mapping one row to a list of features.

    Returns an np.array of the embedded rows.

    NOTE(review): the accumulator init line (orig. line 94) was lost in
    extraction; restored here.
    """
    output = []
    for i in range(sample.shape[0]):
        current = p(sample[i])
        output.append(current)
    return np.array(output)
def f_from_k(coeffs, support_set, k, x):
    """Evaluate the kernel expansion f(x) = sum_j c_j * y_j * k(x_j, x).

    Parameters:
        coeffs: list of multiplicities c_j (one per support vector).
        support_set: list of [y_j, x_j] pairs (label, sample).
        k: kernel function k(u, v).
        x: point at which to evaluate the decision function.

    Returns 0 when the support set is empty.

    NOTE(review): the accumulator init and return lines (orig. 102 and 105)
    were lost in extraction; restored here.
    """
    output = 0
    for c, s in zip(coeffs, support_set):
        # s[0] is the label y_j, s[1] the stored sample x_j.
        output += c * s[0] * k(s[1], x)
    return output
def k1(X1, X2):
    """Degree-2 polynomial kernel on 2D points.

    k1(u, v) = 1 + u.v + (u1*v1)^2 + u1*u2*v1*v2 + (u2*v2)^2, which equals
    the ordinary dot product of plongement_phi(u) and plongement_phi(v).

    NOTE(review): the `def` line was lost in extraction; the name k1 is
    grounded by the call `perceptron_k(X, Y, k1)` below.
    """
    return (1 + X1[0] * X2[0] + X1[1] * X2[1]
            + X1[0] * X1[0] * X2[0] * X2[0]
            + X1[0] * X1[1] * X2[0] * X2[1]
            + X1[1] * X1[1] * X2[1] * X2[1])
def kg(x, y, sigma=10):
    """Gaussian (RBF) kernel: exp(-||x - y||^2 / sigma^2) for 2D points.

    `sigma` controls the bandwidth; kg(x, x) == 1 for any x.
    """
    dx = x[0] - y[0]
    dy = x[1] - y[1]
    squared_distance = dx * dx + dy * dy
    return np.exp(-squared_distance / sigma ** 2)
def perceptron_k(X, Y, k):
    """Kernel perceptron.

    Parameters:
        X: array of shape (m, d) — training samples.
        Y: array of m labels in {-1, +1}.
        k: kernel function k(u, v).

    Returns (coeffs, support_set) such that
    f(x) = sum_j coeffs[j] * y_j * k(x_j, x) (see f_from_k) separates the
    data. Loops until a full pass makes no mistake, so it only terminates if
    the data are separable in the kernel's feature space.

    NOTE(review): the initialisation lines (orig. 117-118) and the lines
    between the support-set append and the coefficient increment (orig.
    127-128) were lost in extraction; reconstructed so each mistake appends
    a fresh support vector whose coefficient ends up at 1 — confirm against
    the original.
    """
    coeffs = []
    support_set = []
    # Go in the loop at least one time.
    classification_error = 1
    while not classification_error == 0:
        classification_error = 0
        for i in range(X.shape[0]):
            # Misclassified when the current kernel expansion gives
            # Y[i] * f(X[i]) <= 0.
            if Y[i] * f_from_k(coeffs, support_set, k, X[i]) <= 0:
                classification_error += 1
                support_set.append([Y[i], X[i]])
                # New support vector starts at 0 and is bumped below.
                coeffs.append(0)
                coeffs[len(coeffs) - 1] = coeffs[len(coeffs) - 1] + 1
    return coeffs, support_set
# NOTE(review): X and Y are built from `data` on lines lost from this
# extract (presumably X = data[:, :2] and Y = data[:, -1]) — confirm
# against the original.
print(perceptron_k(X, Y, k1))
# print(perceptron_k(X, Y, kg))

# Embed the 2D samples in the degree-2 polynomial feature space, then run
# the linear perceptron there (equivalent to the k1 kernel perceptron).
X = apply_plongement(X, plongement_phi)
w = perceptron_nobias(X, Y)

# NOTE(review): `pl` is presumably `import matplotlib.pyplot as pl` from a
# header line lost in this extract — confirm. Colors the scatter by label.
pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
pl.title(u"Perceptron - hyperplan")