--- /dev/null
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+import numpy as np
+from numpy.random import rand
+import matplotlib.pyplot as pl  # pylab is discouraged; pyplot provides the same calls used here
+
+
+def generateData(n):
+    """
+    Generate a 2D linearly separable dataset with 2n samples.
+    Class -1 is clustered around (-0.6, 0.6) and class 1 around (0.6, -0.6),
+    so the classes are separable by a line through the origin.
+    The third element of each sample is its label (-1 or 1).
+    """
+    linear_offset = 0.6
+    xb = (rand(n) * 2 - 1) / 2 - linear_offset
+    yb = (rand(n) * 2 - 1) / 2 + linear_offset
+    xr = (rand(n) * 2 - 1) / 2 + linear_offset
+    yr = (rand(n) * 2 - 1) / 2 - linear_offset
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+def generateData2(n):
+    """
+    Generate a 2D linearly separable dataset with 2n samples.
+    Class -1 is clustered around (-0.5, 0) and class 1 around (1.5, -0.5);
+    the clusters are offset from the origin, but the line x = 0 still
+    separates them almost surely, so the no-bias perceptron converges.
+    The third element of each sample is its label (-1 or 1).
+    """
+    xb = (rand(n) * 2 - 1) / 2 - 0.5
+    yb = (rand(n) * 2 - 1) / 2
+    xr = (rand(n) * 2 - 1) / 2 + 1.5
+    yr = (rand(n) * 2 - 1) / 2 - 0.5
+    inputs = []
+    for i in range(n):
+        inputs.append([xb[i], yb[i], -1])
+        inputs.append([xr[i], yr[i], 1])
+    return inputs
+
+
+training_set_size = 100
+training_set = generateData(training_set_size)
+data = np.array(training_set)
+X = data[:, 0:2]  # 2D inputs
+Y = data[:, -1]   # labels in {-1, 1}
+
+
+def perceptron_nobias(X, Y):
+    """
+    Train a perceptron without a bias term: make full passes over the data,
+    applying the update w <- w + Y[i] * X[i] to each misclassified sample,
+    until one pass produces no errors. The loop only terminates when the
+    data is linearly separable by a hyperplane through the origin.
+    """
+    w = np.zeros(X.shape[1])
+    # Enter the loop at least once
+    classification_error = 1
+    while classification_error != 0:
+        classification_error = 0
+        for i in range(X.shape[0]):
+            # A sample is misclassified when its margin Y[i] * <w, X[i]> is <= 0
+            if Y[i] * np.dot(w, X[i]) <= 0:
+                classification_error += 1
+                w = w + Y[i] * X[i]
+    return w
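+
+
+# A safeguarded variant (sketch, not part of the original script): the loop in
+# perceptron_nobias never terminates when the data is not linearly separable
+# through the origin, so capping the number of passes is a common guard.
+def perceptron_nobias_capped(X, Y, max_epochs=1000):
+    w = np.zeros(X.shape[1])
+    for _ in range(max_epochs):
+        errors = 0
+        for i in range(X.shape[0]):
+            if Y[i] * np.dot(w, X[i]) <= 0:
+                errors += 1
+                w = w + Y[i] * X[i]
+        if errors == 0:
+            break
+    return w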
+
+
+def complete(sample):
+    """Append a constant 1 to each sample so that a bias can be learned
+    as an extra weight by the no-bias perceptron (assumed intent)."""
+    sample = np.atleast_2d(sample)
+    return np.hstack([sample, np.ones((sample.shape[0], 1))])
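+
+
+# Hedged usage sketch (assumption, not in the original script): augmenting the
+# inputs with complete() lets the no-bias perceptron learn an affine separator;
+# the last component of the returned weight vector acts as the bias.
+w_aug = perceptron_nobias(complete(X), Y)
+print("weights:", w_aug[:2], "bias:", w_aug[2])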
+
+
+w = perceptron_nobias(X, Y)
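+# Sanity check (sketch, not in the original script): after convergence every
+# training sample has a strictly positive margin Y[i] * <w, X[i]>.
+assert np.all(Y * X.dot(w) > 0)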
+# The no-bias decision boundary is the line w[0] * x + w[1] * y = 0,
+# i.e. y = -(w[0] / w[1]) * x, drawn between x = -1 and x = 1
+pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
+pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)  # s sets the marker area
+pl.show()