Finish TP3 exo2.
[TP_AA.git] / TP3 / exo2 / tp3_exo2.py
index 78392d9a193fcc9fbab2d0048a22bfd3d0daf6d2..7ceadb1b8185610e7c5d66577261a187324bfe7f 100755 (executable)
@@ -28,7 +28,7 @@ def generateData2(n):
     Generates a 2D linearly separable dataset with 2n samples.
     The third element of the sample is the label
     """
-    xb = (rand(n) * 2 - 1) / 2 - 0.5
+    xb = (rand(n) * 2 - 1) / 2 + 0.5
     yb = (rand(n) * 2 - 1) / 2
     xr = (rand(n) * 2 - 1) / 2 + 1.5
     yr = (rand(n) * 2 - 1) / 2 - 0.5
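+    # xb, yb cluster around (0.5, 0); xr, yr around (1.5, -0.5)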
@@ -74,10 +74,11 @@ def perceptron_nobias(X, Y):
     classification_error = 1
     while not classification_error == 0:
         classification_error = 0
-        for i in range(X.shape[0]):
-            if Y[i] * np.dot(w, X[i]) <= 0:
+        for x, y in zip(X, Y):
+            if y * np.dot(w, x) <= 0:
                 classification_error += 1
-                w = w + Y[i] * X[i]
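+                # perceptron update: pull w toward the misclassified example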
+                w = w + y * x
+        print(classification_error)  # mistakes made in this pass
     return w
 
 
@@ -87,7 +88,8 @@ def complete(sample):
 
 
 def plongement_phi(sample_element):
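+    # degree-2 monomial embedding: (x0, x1) -> (1, x0, x1, x0^2, x0*x1, x1^2)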
-    return [1, sample_element[0], sample_element[1], sample_element[0] * sample_element[0], sample_element[0] * sample_element[1], sample_element[1] * sample_element[1]]
+    return [1, sample_element[0], sample_element[1], sample_element[0]**2,
+            sample_element[0] * sample_element[1], sample_element[1]**2]
 
 
 def apply_plongement(sample, p):
@@ -101,15 +103,21 @@ def apply_plongement(sample, p):
 def f_from_k(coeffs, support_set, k, x):
     output = 0
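+    # kernelised decision function: f(x) = sum_i c_i * y_i * k(x_i, x) over the support set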
     for c, s in zip(coeffs, support_set):
-        output += c * s[0] * k(s[1], x)
+        output += c * s[1] * k(s[0], x)
     return output
 
 
 def k1(X1, X2):
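+    # inner product <plongement_phi(X1), plongement_phi(X2)> written out explicitly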
-    return 1 + X1[0] * X2[0] + X1[1] * X2[1] + X1[0] * X1[0] * X2[0] * X2[0] + X1[0] * X1[1] * X2[0] * X2[1] + X1[1] * X1[1] * X2[1] * X2[1]
+    return 1 + X1[0] * X2[0] + X1[1] * X2[1] + X1[0]**2 * X2[0]**2 \
+             + X1[0] * X1[1] * X2[0] * X2[1] + X1[1]**2 * X2[1]**2
 
 
-def kg(x, y, sigma=10):
+def kg(x, y):
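+    # Gaussian (RBF) kernel exp(-||x - y||^2 / sigma^2)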
+    # sigma = 20   # does not converge
+    # sigma = 10   # does not converge
+    sigma = 1
+    # sigma = 0.5  # overfits
+    # sigma = 0.2  # overfits
     return np.exp(-((x[0] - y[0])**2 + (x[1] - y[1])**2) / sigma**2)
 
 
@@ -120,23 +128,40 @@ def perceptron_k(X, Y, k):
     classification_error = 1
     while not classification_error == 0:
         classification_error = 0
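+        # one pass over the data: each mistake adds (x, y) to the support set or bumps its coefficient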
-        for i in range(X.shape[0]):
-            if Y[i] * f_from_k(coeffs, support_set, k, X[i]) <= 0:
+        for x, y in zip(X, Y):
+            if y * f_from_k(coeffs, support_set, k, x) <= 0:
+                # `in` / `.index()` are unreliable on numpy arrays, so look the
+                # misclassified point up in the support set with np.array_equal
+                idx = next((i for i, (sx, _) in enumerate(support_set)
+                            if np.array_equal(sx, x)), None)
+                if idx is None:
+                    support_set.append((x, y))
+                    coeffs.append(1)
+                else:
+                    coeffs[idx] += 1
                 classification_error += 1
-                support_set.append([Y[i], X[i]])
-                coeffs.append(1)
-            else:
-                coeffs[len(coeffs) - 1] = coeffs[len(coeffs) - 1] + 1
-    return coeffs, support_set
+        print(classification_error)  # mistakes made in this pass
+    # keep support_set as a plain list of (x, y) pairs; its entries have mixed
+    # shapes, so converting it with np.array() is unreliable
+    return np.array(coeffs), support_set
 
 
-print(perceptron_k(X, Y, k1))
-# print(perceptron_k(X, Y, kg))
+def f(w, x, y):
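+    # w . plongement_phi([x, y]) for the explicit embedding; used by the commented-out plot below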
+    return w[0] + w[1] * x + w[2] * y + w[3] * x**2 + w[4] * x * y + w[5] * y**2
 
-X = apply_plongement(X, plongement_phi)
-w = perceptron_nobias(X, Y)
-print(w)
 
 pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
-pl.title(u"Perceptron - hyperplan")
+pl.title(u"Perceptron - prolontaged hyperplan")
+
+# k = k1
+# coeffs, support_set = perceptron_k(X, Y, k)
+k = kg
+coeffs, support_set = perceptron_k(X, Y, k)
+res = training_set_size
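+# scan a res x res grid over [-1.5, 1.5)^2 and mark points where |f| < 0.01
+# as the approximate decision boundary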
+for x in range(res):
+    for y in range(res):
+        xx = -3 / 2 + 3 * x / res
+        yy = -3 / 2 + 3 * y / res
+        if abs(f_from_k(coeffs, support_set, k, [xx, yy])) < 0.01:
+            pl.plot(xx, yy, 'xr')
+
+# X = apply_plongement(X, plongement_phi)
+# w = perceptron_nobias(X, Y)
+# for x in range(res):
+#     for y in range(res):
+#         if abs(f(w, -3 / 2 + 3 * x / res, -3 / 2 + 3 * y / res)) < 0.01:
+#             pl.plot(-3 / 2 + 3 * x / res, -3 / 2 + 3 * y / res, 'xb')
+
 pl.show()