9efcb98968963696643b9dc30801c0a0f3d9b1a5
# -*- coding: utf-8 -*-
import matplotlib.pyplot as pl
import numpy as np
from numpy.random import rand
def generateData(n):
    """
    Generate a 2D linearly separable dataset with 2*n samples.

    Each sample is a list [x, y, label]: n points labelled -1 (the "b"
    cluster, shifted by -linear_offset in x and +linear_offset in y) and
    n points labelled +1 (the "r" cluster, shifted the opposite way).
    Samples alternate -1 / +1 in the returned list.

    NOTE(review): relies on a module-level `linear_offset` that is not
    visible in this chunk — confirm it is defined before calling.
    """
    # Each coordinate is uniform in [-0.5, 0.5] before the offset shift.
    xb = (rand(n) * 2 - 1) / 2 - linear_offset
    yb = (rand(n) * 2 - 1) / 2 + linear_offset
    xr = (rand(n) * 2 - 1) / 2 + linear_offset
    yr = (rand(n) * 2 - 1) / 2 - linear_offset
    inputs = []
    for i in range(n):
        inputs.append([xb[i], yb[i], -1])
        inputs.append([xr[i], yr[i], 1])
    return inputs
def generateData2(n):
    """
    Generate a 2D linearly separable dataset with 2*n samples.

    Each sample is a list [x, y, label]: n points labelled -1 (x in
    [0, 1], y in [-0.5, 0.5]) and n points labelled +1 (x in [1, 2],
    y in [-1, 0]).  Samples alternate -1 / +1 in the returned list.
    """
    # Each coordinate is uniform in [-0.5, 0.5] before the constant shift.
    xb = (rand(n) * 2 - 1) / 2 + 0.5
    yb = (rand(n) * 2 - 1) / 2
    xr = (rand(n) * 2 - 1) / 2 + 1.5
    yr = (rand(n) * 2 - 1) / 2 - 0.5
    inputs = []
    for i in range(n):
        inputs.append([xb[i], yb[i], -1])
        inputs.append([xr[i], yr[i], 1])
    return inputs
# Build the training data: training_set_size points per class,
# i.e. 2 * training_set_size samples of the form [x, y, label].
training_set_size = 150
training_set = generateData2(training_set_size)
data = np.array(training_set)
def perceptron_nobias(X, Y):
    """
    Train a perceptron without an explicit bias term.

    Parameters:
        X: 2D array of shape (m, d), one sample per row.  To learn an
           affine separator, append a constant-1 column first (see
           `complete`).
        Y: 1D array of m labels in {-1, +1}.

    Returns:
        w: weight vector of shape (d,) such that sign(w . x) classifies
           every training sample correctly.

    Warning: terminates only if the data is linearly separable; otherwise
    the loop runs forever (standard perceptron behaviour).
    """
    w = np.zeros([len(X[0])])
    # Start at 1 so we enter the loop at least once.
    classification_error = 1
    while not classification_error == 0:
        classification_error = 0
        for x, y in zip(X, Y):
            # Misclassified (or on the boundary): nudge w toward y * x.
            if y * np.dot(w, x) <= 0:
                classification_error += 1
                w = w + y * x
    return w
def complete(sample):
    """
    Append a constant bias column of ones to a 2D sample array.

    Parameters:
        sample: 2D array-like of shape (m, d).

    Returns:
        np.ndarray of shape (m, d + 1) whose last column is all ones,
        so that perceptron_nobias can learn an affine separator.
    """
    # Insert the column of ones after the last existing column.
    new_sample = np.insert(sample, len(sample[0]), [1], axis=1)
    return np.array(new_sample)
# Train on the bias-augmented data and plot the separating line.
# NOTE(review): X, Y, x_start1 and x_start2 are defined in lines not
# visible in this chunk — X is presumably complete(data[:, :2]) and Y
# the label column; confirm against the full file.
w = perceptron_nobias(X, Y)
# w is orthogonal to the hyperplane.
# plot arguments format is pl.plot([x1, x2], [y1, y2])
# Without bias: w[0]x + w[1]y = 0, so y = -w[0]x / w[1]
# pl.plot([-1, 1], [w[0] / w[1], -w[0] / w[1]])
# With generateData2 and complete (third weight is the bias):
# w[0]x + w[1]y + w[2] = 0, so y = -(w[0]x + w[2]) / w[1]
pl.plot([x_start1, x_start2],
        [-(w[0] * x_start1 + w[2]) / w[1],
         -(w[0] * x_start2 + w[2]) / w[1]])
pl.scatter(X[:, 0], X[:, 1], c=Y, s=training_set_size)
pl.title(u"Perceptron - hyperplan")