From 6f4ffbd78d75ea191f5d7708c1fe3a6f5ae734b5 Mon Sep 17 00:00:00 2001
From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?=
Date: Tue, 13 Nov 2018 14:17:27 +0100
Subject: [PATCH] Add TP3 exo 1 first question implementation.
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jérôme Benoit
---
 TP3/exo1/tp3_exo1.py | 74 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 74 insertions(+)
 create mode 100755 TP3/exo1/tp3_exo1.py

diff --git a/TP3/exo1/tp3_exo1.py b/TP3/exo1/tp3_exo1.py
new file mode 100755
index 0000000..dfd04d8
--- /dev/null
+++ b/TP3/exo1/tp3_exo1.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+import numpy as np
+from numpy.random import rand
+import pylab as pl
+
+
+def generateData(n):
+    """
+    Generate a 2D linearly separable dataset with 2n samples.
+    The third element of each sample is its label (-1 or 1).
+    """
+    linear_offset = 0.6
+    xb = (rand(n) * 2 - 1) / 2 - linear_offset
+    yb = (rand(n) * 2 - 1) / 2 + linear_offset
+    xr = (rand(n) * 2 - 1) / 2 + linear_offset
+    yr = (rand(n) * 2 - 1) / 2 - linear_offset
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+def generateData2(n):
+    """
+    Generate a 2D linearly separable dataset with 2n samples.
+    The third element of each sample is its label (-1 or 1).
+    """
+    xb = (rand(n) * 2 - 1) / 2 - 0.5
+    yb = (rand(n) * 2 - 1) / 2
+    xr = (rand(n) * 2 - 1) / 2 + 1.5
+    yr = (rand(n) * 2 - 1) / 2 - 0.5
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+training_set_size = 100
+training_set = generateData(training_set_size)
+data = np.array(training_set)
+X = data[:, 0:2]
+Y = data[:, -1]
+
+
+def perceptron_nobias(X, Y):
+    w = np.zeros([len(X[0])])
+    # Start at 1 so the loop runs at least once
+    classification_error = 1
+    while classification_error != 0:
+        classification_error = 0
+        for i in range(X.shape[0]):
+            # Misclassified sample: apply the perceptron update rule
+            if Y[i] * np.dot(w, X[i]) <= 0:
+                classification_error = classification_error + 1
+                w = w + Y[i] * X[i]
+    return w
+
+
+def complete(sample):
+    # Not used yet: placeholder for the next questions
+    # (currently it only adds a leading axis to the sample)
+    sample = np.expand_dims(sample, axis=0)
+    return sample
+
+
+w = perceptron_nobias(X, Y)
+# Separating line w[0] * x + w[1] * y = 0, i.e. y = -(w[0] / w[1]) * x,
+# drawn between x = -1 and x = 1
+pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
+pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
+pl.show()
--
2.34.1