-
Notifications
You must be signed in to change notification settings - Fork 0
/
linear_regression.py
91 lines (62 loc) · 2.7 KB
/
linear_regression.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import numpy as np
import matplotlib.pyplot as plt
from training_model import TrainingModel
# environ to see if there is a display defined for showing off plots
from os import environ
class LinearRegression(TrainingModel):
    """Linear regression trained by batch gradient descent.

    Minimizes the sum-of-squares cost

        J(w) = sum( 1/2 * (y - X w^T)^2 )

    over the weight row-vector ``self.weights``. The data matrix ``X``,
    targets ``y``, ``learning_rate``, ``number_of_epochs``, and the initial
    ``weights`` are all managed by the ``TrainingModel`` base class
    (assumed -- TODO confirm against training_model.py).
    """

    def __init__(self, X, y, learning_rate, number_of_epochs):
        # Python-3 zero-argument super() (was the redundant two-argument form).
        super().__init__(X, y, learning_rate, number_of_epochs)

    def calculate_cost(self):
        """Compute the least-squares cost with the current weights.

        Stores the scalar result in ``self.cost``. (Docstring fixed: this
        is the linear-regression squared-error cost, not logistic.)
        """
        residuals = self.y - np.matmul(self.X, np.transpose(self.weights))
        self.cost = np.sum(1/2 * np.power(residuals, 2))

    def calculate_grad(self):
        """Compute the cost gradient w.r.t. the weights into ``self.grad``.

        grad = -(y - X w^T)^T X, the derivative of the cost above.
        """
        residuals = self.y - np.matmul(self.X, np.transpose(self.weights))
        self.grad = -np.matmul(np.transpose(residuals), self.X)

    def train_model(self):
        """Run gradient descent for ``self.number_of_epochs`` steps.

        Returns the trained weight vector. Every 6000th epoch (and the
        first) it reports the cost and, when a display is available,
        plots the data against the current predictions.
        """
        for epoch in range(self.number_of_epochs):
            # One full-batch gradient step.
            self.calculate_grad()
            self.weights = self.weights - self.learning_rate * self.grad
            # Periodic progress report; epoch 0 included as a baseline.
            if (epoch + 1) % 6000 == 0 or epoch == 0:
                self.calculate_cost()
                print("Cost = %f" % self.cost)
                # Only try to plot when an X display exists (e.g. not on CI).
                if 'DISPLAY' in environ.keys():
                    y_predict = np.matmul(self.X, np.transpose(self.weights))
                    # Column 1 is assumed to be the raw (degree-1) feature
                    # -- true for main() below; TODO confirm for other callers.
                    plt.plot(self.X[:, 1], self.y, 'ro')
                    plt.plot(self.X[:, 1], y_predict, 'bo')
                    plt.show()
        return self.weights

    def test_model(self):
        # Evaluation on held-out data is not implemented yet.
        pass
def main():
    """Smoke-test the model on synthetic polynomial data."""
    # 30 sorted sample points in [0, 10), plus independently sorted
    # multiplicative noise in [-15, 15).
    X = np.sort(np.random.rand(30)) * 10
    noise = (np.sort(np.random.rand(30)) - .5) * 30
    y = np.multiply(- .002 * np.power(X, 6) + .3 * np.power(X, 5) + .002 * np.power(X, 4) + .00000000000033 * X + 10, noise)
    y = y.reshape(30, 1)

    # The dummy data is (noisy) degree-6 polynomial, so one feature per
    # power of X from 0 through 6 should model it well.
    X_features = np.transpose(np.array([np.power(X, degree) for degree in range(0, 7)]))

    # Preview the raw data when a display is attached.
    if 'DISPLAY' in environ:
        plt.plot(X, y, 'ro')
        plt.show()

    # Hyperparameters. TODO: figure out why the learning rate has to be
    # so low to prevent divergence.
    learning_rate = .00000000000001
    number_of_epochs = 100000

    model = LinearRegression(X_features, y, learning_rate, number_of_epochs)
    model.train_model()


if __name__ == "__main__":
    main()