ex2.py
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as sc
from matplotlib.legend_handler import HandlerLine2D
def softmax(z):
    # Subtract the max for numerical stability before exponentiating
    e_z = np.exp(z - np.max(z))
    return e_z / e_z.sum()
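# Note: subtracting np.max(z) leaves the result unchanged (softmax is
# shift-invariant) but keeps np.exp from overflowing; e.g.
# softmax(np.array([[1000.0, 1001.0]])) still yields finite probabilities.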
class LogisticRegression(object):
    def __init__(self, in_data, label, n_in, n_out):
        self.x = in_data
        self.y = label
        self.w = np.zeros((n_in, n_out))  # weights, one column per class
        self.b = np.zeros((1, n_out))     # one bias per class
    def train(self, num_of_epochs=30, lr=0.1, in_data=None):
        if in_data is not None:
            self.x = in_data
        for epoch in range(num_of_epochs):
            # Shuffle samples and labels together each epoch
            s = np.arange(self.x.shape[0])
            np.random.shuffle(s)
            self.x = self.x[s]
            self.y = self.y[s]
            for x, y in zip(self.x, self.y):
                x, y = x.item(), y.item()  # each row holds a single scalar
                z = np.dot(x, self.w) + self.b
                # Predict the class (labels are 1-based)
                current_softmax = softmax(z)
                y_hat = np.argmax(current_softmax) + 1
                # Update w and b only on a misclassification
                if y != y_hat:
                    for a in range(self.w.shape[1]):
                        if a + 1 == y:
                            self.w[0, a] -= lr * (current_softmax[0, a] * x - x)
                            self.b[0, a] -= lr * (current_softmax[0, a] - 1)
                        else:
                            self.w[0, a] -= lr * current_softmax[0, a] * x
                            self.b[0, a] -= lr * current_softmax[0, a]
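    # Note: the updates in train() are the per-sample cross-entropy
    # gradient, dL/dz_a = p_a - 1{a + 1 == y} with p = softmax(z);
    # applying them only on misclassified samples is a perceptron-style
    # variant rather than plain SGD.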
    def predict(self, x_val):
        # Return the most probable class label (1-based)
        return np.argmax(softmax(np.dot(x_val, self.w) + self.b)) + 1
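    # Example (illustrative): after training on the data built in
    # __main__ below, classifier.predict(np.array([2.0])) should
    # usually return 1, since class 1 is centred at x = 2.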
    def draw_plot(self):
        x_values = np.arange(0, 10, 0.1).tolist()
        pdf_y_values = []
        model_y_values = []
        # True posterior P(y=1 | x) under the three Gaussians (equal priors),
        # and the model's probability for class 1, at each x
        for val in x_values:
            pdf_y_values.append(sc.norm(2, 1).pdf(val) / (sc.norm(2, 1).pdf(val) + sc.norm(4, 1).pdf(val) +
                                                          sc.norm(6, 1).pdf(val)))
            model_y_values.append(softmax(np.dot(self.w, val) + self.b).tolist()[0][0])
        # Set up the figure (canvas.manager is the Matplotlib >= 3.4 API)
        fig = plt.figure(0)
        fig.canvas.manager.set_window_title('Normal Distribution vs. Logistic Regression')
        plt.title("Normal Distribution vs. Logistic Regression")
        plt.axis([0, 10, 0, 1.5])
        plt.xlabel('x')
        plt.ylabel('Probability')
        normal_graph, = plt.plot(x_values, pdf_y_values, 'r--', label="Normal Distribution")
        plt.plot(x_values, model_y_values, label="Logistic Regression")
        plt.legend(handler_map={normal_graph: HandlerLine2D(numpoints=4)})
        plt.show()
if __name__ == '__main__':
    # Create 100 samples per class; class a is drawn from N(2a, 1)
    x = np.empty((0, 1), float)
    y = np.empty((0, 1), int)
    for a in [1, 2, 3]:
        for i in range(100):
            x = np.append(x, np.array([np.random.normal(2 * a, 1, 1)]), axis=0)
            y = np.append(y, np.array([[a]]), axis=0)
    # # Draw a scatter of the points per class
    # plt.scatter(x[0:100], [1] * 100, color='green')
    # plt.scatter(x[100:200], [2] * 100, color='red')
    # plt.scatter(x[200:300], [3] * 100, color='blue')
    # plt.show()
    # Construct and train the Logistic Regression classifier
    classifier = LogisticRegression(x, y, 1, 3)
    classifier.train()
    classifier.draw_plot()
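    # A quick sanity check (an added sketch, not part of the original
    # script): accuracy of the trained model on its own training data.
    correct = sum(classifier.predict(xi) == yi.item() for xi, yi in zip(x, y))
    print('Training accuracy: %.2f' % (correct / len(x)))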