Add the beginning of the exo2 implementation.
diff --git a/TP3/exo1/tp3_exo1.py b/TP3/exo1/tp3_exo1.py
index dfd04d831840fa1024b5a45c35073325826d4ffa..73dd307de18e12ea18c25324c03142001c04c12d 100755
--- a/TP3/exo1/tp3_exo1.py
+++ b/TP3/exo1/tp3_exo1.py
@@ -39,8 +39,8 @@ def generateData2(n):
     return inputs
 
 
-training_set_size = 100
-training_set = generateData(training_set_size)
+training_set_size = 150
+training_set = generateData2(training_set_size)
 data = np.array(training_set)
 X = data[:, 0:2]
 Y = data[:, -1]
@@ -54,17 +54,24 @@ def perceptron_nobias(X, Y):
         classification_error = 0
         for i in range(X.shape[0]):
             if Y[i] * np.dot(w, X[i]) <= 0:
-                classification_error = classification_error + 1
+                classification_error += 1
                 w = w + Y[i] * X[i]
     return w
 
 
 def complete(sample):
-    sample = np.expand_dims(sample, axis=0)
-    return sample
+    # append a constant 1 to every sample so the bias can be learned as the last component of w
+    return np.insert(sample, len(sample[0]), [1], axis=1)
 
 
+X = complete(X)
 w = perceptron_nobias(X, Y)
-pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
+# w is orthogonal to the hyperplane
+# with generateData:
+# pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
+# with generateData2 and complete:
+# FIXME: the hyperplane equation is not correct
+pl.plot([0, -1 / w[1]], [w[0] / w[1] - 1 / w[1], -w[0] / w[1] - 1 / w[1]])
 pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
+pl.title(u"Perceptron - hyperplan")
 pl.show()
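
A possible way to resolve the FIXME, sketched here rather than in the committed file: complete() appends a constant 1 to every sample, so the last component w[2] plays the role of the bias and the separating line satisfies w[0]*x + w[1]*y + w[2] = 0. The sketch below assumes w[1] != 0 and that the first feature roughly spans [-1, 1]; the helper name plot_separator and the pyplot alias are illustrative, not part of the repository.

import numpy as np
import matplotlib.pyplot as pl

def plot_separator(w, x_range=(-1.0, 1.0)):
    # Decision boundary of the perceptron on the augmented data:
    # w[0]*x + w[1]*y + w[2] = 0, i.e. y = -(w[0]*x + w[2]) / w[1]
    # (assumes w[1] != 0).
    xs = np.array(x_range)
    ys = -(w[0] * xs + w[2]) / w[1]
    pl.plot(xs, ys)

In the script above, the FIXME'd pl.plot(...) call could then be replaced by plot_separator(w) before the scatter and show calls.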