| 1 | #!/usr/bin/env python3 |
| 2 | |
| 3 | # -*- coding: utf-8 -*- |
| 4 | import numpy as np |
| 5 | import pylab as pl |
| 6 | from mpl_toolkits.mplot3d import Axes3D |
| 7 | |
| 8 | |
# Load the dataset: one sample per row, formatted as [x1, x2, y].
data = np.loadtxt("dataRegLin2D.txt")
X = data[:, 0:2]  # the two input features
Y = data[:, -1]   # regression target
| 12 | |
| 13 | |
def complete(sample):
    """Append a constant bias feature (value 1) to every sample.

    Parameters
    ----------
    sample : np.ndarray
        Either a 2-D design matrix of shape (n, d) or a 1-D array of n
        scalar features.

    Returns
    -------
    np.ndarray
        Array of shape (n, d + 1): the input with a column of ones
        appended, so the trained weight vector can carry an intercept.
    """
    if sample.ndim > 1:
        # 2-D input: append a column of ones after the existing features.
        ones = np.ones((sample.shape[0], 1))
        return np.append(sample, ones, axis=-1)
    # 1-D input: each scalar feature s becomes the row [s, 1].
    return np.array([[s, 1] for s in sample])
| 24 | |
| 25 | |
def train_regression(X, Y):
    """Fit ordinary least-squares weights (with intercept) on (X, Y).

    Solves the normal equations (X'X) w = X'Y.  Using np.linalg.solve
    instead of explicitly forming inv(X'X) gives the same mathematical
    solution but is faster and numerically more stable on
    ill-conditioned design matrices.

    Parameters
    ----------
    X : np.ndarray
        Design matrix (n, d) or 1-D feature array of length n; a bias
        column is appended internally by complete().
    Y : np.ndarray
        Target vector of length n.

    Returns
    -------
    np.ndarray
        Weight vector of length d + 1; the last entry is the intercept.
    """
    X = complete(X)
    Xt = np.transpose(X)
    return np.linalg.solve(np.dot(Xt, X), np.dot(Xt, Y))
| 29 | |
| 30 | |
def predict(x, w):
    """Return the linear prediction for sample x: dot(coeffs, x) + bias.

    The last entry of w is the intercept; the rest are the feature
    coefficients (matching the bias column appended by complete()).
    """
    *coeffs, bias = w
    return np.dot(coeffs, x) + bias
| 33 | |
| 34 | |
def error(X, Y, w, idx):
    """Mean squared error of model w on (X, Y).

    Only the first idx columns of each row of X are fed to predict(),
    so the same routine scores both the two-feature and the
    single-feature models.
    """
    n = len(X)
    total = sum((predict(X[i, :idx], w) - Y[i]) ** 2 for i in range(n))
    return total / n
| 42 | |
| 43 | |
# Figure with three panels: the full two-feature fit and each
# single-feature fit, printing the training MSE of each model.
fig = pl.figure()
# Panel 1: 3-D scatter of (x1, x2, y); regression on both features.
ax = fig.add_subplot(131, projection='3d')
ax.scatter(X[:, 0], X[:, 1], Y)
w1 = train_regression(X, Y)
print(error(X, Y, w1, 2))  # MSE of the two-feature model

# Panel 2: y against the first feature only.
ax = fig.add_subplot(132)
ax.scatter(X[:, 0], Y[:])
w2 = train_regression(X[:, 0], Y)
print(error(X[:, 0].reshape((len(X), 1)), Y, w2, 1))  # MSE, feature 1 only

# Panel 3: y against the second feature only.
ax = fig.add_subplot(133)
ax.scatter(X[:, 1], Y[:])
w3 = train_regression(X[:, 1], Y)
print(error(X[:, 1].reshape((len(X), 1)), Y, w3, 1))  # MSE, feature 2 only

pl.show()