Add TP3 exo3 and TP4 exo1.
diff --git a/TP4/exo1/tp4_exo1.py b/TP4/exo1/tp4_exo1.py
new file mode 100755 (executable)
index 0000000..06bc95b
--- /dev/null
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+import numpy as np
+import matplotlib.pyplot as plt
+from mpl_toolkits.mplot3d import Axes3D  # registers the 3D projection (needed on older Matplotlib)
+
+
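+# Each row of dataRegLin2D.txt is (x1, x2, y): two features followed by the target.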
+data = np.loadtxt("dataRegLin2D.txt")
+X = data[:, 0:2]
+Y = data[:, -1]
+
+
+def complete(sample):
+    """Append a constant 1 to every sample so the intercept is learned as the last weight."""
+    # Works both for a 1-D vector (single feature) and a 2-D (n_samples, n_features) array.
+    return np.column_stack((sample, np.ones(len(sample))))
+
+
+def train_regression(X, Y):
+    """Closed-form least squares via the normal equations: w = (X^T X)^-1 X^T Y."""
+    X = complete(X)
+    # np.linalg.solve(X.T @ X, X.T @ Y) is numerically safer; the explicit inverse mirrors the formula.
+    return np.linalg.inv(X.T @ X) @ X.T @ Y
+
+
+def predict(x, w):
+    """Predict y for one sample x (features only): dot product with the weights plus the bias w[-1]."""
+    return np.dot(w[:-1], x) + w[-1]
+
+
+def error(X, Y, w, idx):
+    """Mean squared error of the model w on (X, Y); idx is the number of feature columns of X to use."""
+    err = 0.0
+    for i in range(len(X)):
+        y = predict(X[i, :idx], w)
+        err += (y - Y[i]) ** 2
+    return err / len(X)
+
+
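+# Compare the regression on both features (3D scatter) with regressions on each feature alone,
+# printing the mean squared error of each fit.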
+fig = plt.figure()
+ax = fig.add_subplot(131, projection='3d')
+ax.scatter(X[:, 0], X[:, 1], Y)
+w1 = train_regression(X, Y)
+print(error(X, Y, w1, 2))
+
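+# Fit on the first feature only.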
+ax = fig.add_subplot(132)
+ax.scatter(X[:, 0], Y)
+w2 = train_regression(X[:, 0], Y)
+print(error(X[:, 0].reshape((len(X), 1)), Y, w2, 1))
+
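+# Fit on the second feature only.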
+ax = fig.add_subplot(133)
+ax.scatter(X[:, 1], Y)
+w3 = train_regression(X[:, 1], Y)
+print(error(X[:, 1].reshape((len(X), 1)), Y, w3, 1))
+
+plt.show()
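
As a cross-check, the closed-form weights can be compared against NumPy's built-in least-squares solver; a minimal sketch, assuming the same dataRegLin2D.txt layout as above (the two should agree up to numerical precision):

    import numpy as np

    data = np.loadtxt("dataRegLin2D.txt")
    X, Y = data[:, 0:2], data[:, -1]

    # Same bias column that complete() appends, solved directly as a least-squares problem.
    Xb = np.column_stack((X, np.ones(len(X))))
    w_lstsq, _, _, _ = np.linalg.lstsq(Xb, Y, rcond=None)
    print(w_lstsq)  # should match w1 = train_regression(X, Y)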