Fix TP3 exo2.
[TP_AA.git] / TP3 / exo1 / tp3_exo1.py
CommitLineData
6f4ffbd7
JB
1#!/usr/bin/env python3
2
3# -*- coding: utf-8 -*-
4import numpy as np
5from numpy.random import rand
6import pylab as pl
7
8
def generateData(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is [x, y, label]: label -1 for the cluster shifted to the
    upper-left, label +1 for the cluster shifted to the lower-right.
    """
    offset = 0.6
    # Each coordinate is uniform in [-0.5, 0.5]; the two clusters are then
    # pushed apart by +/- offset on both axes so they are separable.
    blue_x = (rand(n) * 2 - 1) / 2 - offset
    blue_y = (rand(n) * 2 - 1) / 2 + offset
    red_x = (rand(n) * 2 - 1) / 2 + offset
    red_y = (rand(n) * 2 - 1) / 2 - offset
    samples = []
    for bx, by, rx, ry in zip(blue_x, blue_y, red_x, red_y):
        samples.append([bx, by, -1])
        samples.append([rx, ry, 1])
    return samples
24
25
def generateData2(n):
    """
    Generate a 2D linearly separable dataset with 2n samples.

    Each sample is [x, y, label]: label -1 for the cluster centred near
    (0.5, 0.0), label +1 for the cluster centred near (1.5, -0.5).
    """
    # Each coordinate is uniform in [-0.5, 0.5] before the cluster shift.
    left_x = (rand(n) * 2 - 1) / 2 + 0.5
    left_y = (rand(n) * 2 - 1) / 2
    right_x = (rand(n) * 2 - 1) / 2 + 1.5
    right_y = (rand(n) * 2 - 1) / 2 - 0.5
    samples = []
    for lx, ly, rx, ry in zip(left_x, left_y, right_x, right_y):
        samples.append([lx, ly, -1])
        samples.append([rx, ry, 1])
    return samples
40
41
# Build the training data: 2 * training_set_size labelled points drawn
# from the second generator, then split coordinates from labels.
training_set_size = 150
training_set = generateData2(training_set_size)
data = np.array(training_set)
X = data[:, :2]   # sample coordinates (x, y)
Y = data[:, -1]   # class labels in {-1, +1}
47
48
def perceptron_nobias(X, Y, max_epochs=None):
    """
    Train a perceptron without an explicit bias term.

    Repeatedly sweeps the dataset, applying the perceptron update
    w <- w + y*x on every misclassified sample (y * <w, x> <= 0), until a
    full pass makes no mistake.  This converges only when the data are
    linearly separable; use max_epochs to bound the work otherwise.

    X: sequence of feature vectors, all of the same length.
    Y: sequence of labels in {-1, +1}, aligned with X.
    max_epochs: optional cap on the number of full sweeps.  The default
        (None) loops until convergence, matching the original behaviour.
    Returns the learned weight vector w as a 1-D numpy array.
    """
    w = np.zeros(len(X[0]))
    epoch = 0
    while max_epochs is None or epoch < max_epochs:
        epoch += 1
        errors = 0
        for x, y in zip(X, Y):
            # <= 0 also counts points exactly on the boundary as errors.
            if y * np.dot(w, x) <= 0:
                errors += 1
                w = w + y * x
        if errors == 0:
            # A clean pass over the whole dataset: training is done.
            break
    return w
60
61
def complete(sample):
    """
    Append a constant bias feature (1) to every sample.

    sample: 2-D array-like of shape (m, d).
    Returns a numpy array of shape (m, d + 1) whose last column is ones.
    """
    width = len(sample[0])
    # Insert a column of ones just after the last existing feature column.
    augmented = np.insert(sample, width, [1], axis=1)
    return np.array(augmented)
6f4ffbd7
JB
65
66
# Add the bias column and train the perceptron on the augmented inputs.
X = complete(X)
w = perceptron_nobias(X, Y)

# w is orthogonal to the separating hyperplane.
# With generateData (no bias column):
#   w[0]x + w[1]y = 0, so y = -w[0]x / w[1]
#   pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
# With generateData2 and complete (bias column appended):
#   w[0]x + w[1]y + w[2] = 0, so y = -(w[0]x + w[2]) / w[1]
x_left = -0.5
x_right = 2.5
y_left = -(w[0] * x_left + w[2]) / w[1]
y_right = -(w[0] * x_right + w[2]) / w[1]
pl.plot([x_left, x_right], [y_left, y_right])
pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
pl.title(u"Perceptron - hyperplan")
pl.show()