X-Git-Url: https://git.piment-noir.org/?p=TP_AA.git;a=blobdiff_plain;f=TP3%2Fexo2%2Ftp3_exo2.py;fp=TP3%2Fexo2%2Ftp3_exo2.py;h=c92d590e7013db5771abd8c644a62c94418baa32;hp=0000000000000000000000000000000000000000;hb=ce56d6abaff4ef1fd9598f542d521764278ca5bb;hpb=6f4ffbd78d75ea191f5d7708c1fe3a6f5ae734b5

diff --git a/TP3/exo2/tp3_exo2.py b/TP3/exo2/tp3_exo2.py
new file mode 100755
index 0000000..c92d590
--- /dev/null
+++ b/TP3/exo2/tp3_exo2.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import numpy as np
+from numpy.random import rand
+import matplotlib.pyplot as pl
+
+
+def generateData(n):
+    """
+    Generates a 2D linearly separable dataset with 2n samples.
+    The third element of each sample is the label.
+    """
+    linear_offset = 0.6
+    xb = (rand(n) * 2 - 1) / 2 - linear_offset
+    yb = (rand(n) * 2 - 1) / 2 + linear_offset
+    xr = (rand(n) * 2 - 1) / 2 + linear_offset
+    yr = (rand(n) * 2 - 1) / 2 - linear_offset
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+def generateData2(n):
+    """
+    Generates a 2D linearly separable dataset with 2n samples.
+    The third element of each sample is the label.
+    """
+    xb = (rand(n) * 2 - 1) / 2 - 0.5
+    yb = (rand(n) * 2 - 1) / 2
+    xr = (rand(n) * 2 - 1) / 2 + 1.5
+    yr = (rand(n) * 2 - 1) / 2 - 0.5
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+def generateData3(n):
+    """
+    Generates a 2D dataset that is not linearly separable, with n negative
+    samples and roughly 2n positive ones; the third element is the label.
+    """
+    # (xb, yb) lies in the square of side 1 centred at the origin
+    xb = (rand(n) * 2 - 1) / 2
+    yb = (rand(n) * 2 - 1) / 2
+    # (xr, yr) lies in the square of side 3 centred at the origin
+    xr = 3 * (rand(4 * n) * 2 - 1) / 2
+    yr = 3 * (rand(4 * n) * 2 - 1) / 2
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+    for i in range(4 * n):
+        # keep only the points lying outside the square of side 2 centred
+        # at the origin
+        if abs(xr[i]) >= 1 or abs(yr[i]) >= 1:
+            inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+training_set_size = 150
+training_set = generateData2(training_set_size)
+data = np.array(training_set)
+X = data[:, 0:2]
+Y = data[:, -1]
+
+
+def perceptron_nobias(X, Y):
+    w = np.zeros(X.shape[1])
+    # Enter the loop at least once
+    classification_error = 1
+    while classification_error != 0:
+        classification_error = 0
+        for i in range(X.shape[0]):
+            if Y[i] * np.dot(w, X[i]) <= 0:
+                classification_error += 1
+                w = w + Y[i] * X[i]
+    return w
+
+
+def complete(sample):
+    new_sample = np.insert(sample, len(sample[0]), [1], axis=1)
+    return np.array(new_sample)
+
+
+def plongement(sample_element):
+    return [1, sample_element[0], sample_element[1], sample_element[0] * sample_element[0], sample_element[0] * sample_element[1], sample_element[1] * sample_element[1]]
+
+
+def apply_plongement(sample):
+    output = []
+    for i in range(sample.shape[0]):
+        current = plongement(sample[i])
+        output.append(current)
+    return np.array(output)
+
+
+X = apply_plongement(X)
+w = perceptron_nobias(X, Y)
+pl.scatter(X[:, 1], X[:, 2], c=Y, s=training_set_size)
+pl.title("Perceptron - hyperplane")
+pl.show()
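
The script titles the figure "hyperplane" but only scatters the embedded training points; it never draws the separator that perceptron_nobias() finds. A minimal follow-up sketch, assuming the script above has just been run so that w, X, Y and training_set_size are still in scope (the grid variables gx, gy, phi and decision below are purely illustrative names), evaluates w . plongement(x, y) on a grid and traces its zero level set with pl.contour:

import numpy as np
import matplotlib.pyplot as pl

xs = np.linspace(-1.5, 2.5, 300)
ys = np.linspace(-1.5, 1.5, 300)
gx, gy = np.meshgrid(xs, ys)
# Same monomials as plongement(), evaluated on the whole grid at once
phi = [np.ones_like(gx), gx, gy, gx * gx, gx * gy, gy * gy]
decision = sum(wk * fk for wk, fk in zip(w, phi))
pl.scatter(X[:, 1], X[:, 2], c=Y, s=training_set_size)
pl.contour(gx, gy, decision, levels=[0])  # learned decision boundary
pl.show()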