Skip to content

Commit 5d985e5

Browse files
author
Jeffin Francis
committed
Multi variable linear regression
1 parent 795d8ea commit 5d985e5

File tree

2 files changed

+1057
-0
lines changed

2 files changed

+1057
-0
lines changed
Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
import numpy as np
2+
import pandas as pd
3+
import matplotlib.pyplot as plt
4+
5+
class LinearRegression:
    """Multivariate linear regression trained with batch gradient descent.

    The caller is expected to include a bias (intercept) column of ones
    in the design matrix ``x``.
    """

    def fit(self, x, y, learning_rate):
        """Fit the weights to (x, y) by gradient descent and return them.

        x : 2-D array, shape (n_samples, n_features), bias column included.
        y : 1-D array of targets, length n_samples.
        learning_rate : step size for each gradient-descent update.
        """
        # One weight per feature column, starting from zero.
        self.n_weights = np.zeros(x.shape[1])
        self.learning_rate = learning_rate
        self.loss_ = []  # per-epoch training loss, consumed by plot()
        # Bug fix: the original called cost_function/gradient through the
        # module-level global `model` instead of `self`, so the class only
        # worked when a global named `model` happened to exist.
        print("Initial cost {} ".format(self.cost_function(x, y, self.n_weights)))
        self.gradient(x, y, self.n_weights, 10000)
        print("Final cost {} ".format(self.cost_function(x, y, self.n_weights)))
        return self.n_weights

    def cost_function(self, x, y, n_weights):
        """Return the half-MSE cost J(w) = sum((x.w - y)**2) / (2n).

        Bug fix: the original ignored the ``n_weights`` argument and always
        used ``self.n_weights``. Every existing call site passes
        ``self.n_weights``, so honouring the parameter is backward compatible.
        """
        n = len(y)
        return np.sum((x.dot(n_weights) - y) ** 2) / (2 * n)

    def gradient(self, x, y, n_weights, epochs):
        """Run ``epochs`` steps of batch gradient descent.

        Updates ``self.n_weights`` in place and appends the cost after each
        step to ``self.loss_``.  NOTE: ``n_weights`` is expected to alias
        ``self.n_weights`` (as fit() passes it); the in-place ``-=`` keeps
        both names in sync.
        """
        m = len(y)
        for i in range(epochs):
            h = x.dot(n_weights)  # predictions for current weights
            loss = h - y          # residuals
            # Gradient of the half-MSE cost, scaled by the learning rate.
            change = (x.T.dot(loss) / m) * self.learning_rate
            self.n_weights -= change  # in-place so the alias stays current
            self.loss_.append(self.cost_function(x, y, self.n_weights))
            if i % 10 == 0:
                print("Loss of {}th epoch is {} ".format(i , self.cost_function(x, y, self.n_weights)))
        return self.n_weights

    def plot(self):
        """Plot the per-epoch loss curve recorded during training."""
        plt.plot(self.loss_)
        plt.xlabel("Epochs")
        plt.ylabel("Loss")
        plt.show()
39+
40+
41+
42+
if __name__ == "__main__":
    # Load the student-scores dataset and add a bias (intercept) column.
    data = pd.read_csv('student.csv')
    # Idiom fix: scalar assignment broadcasts to every row; the original
    # built a throwaway Python list with `[1 for i in data["Math"]]`.
    data["one"] = 1
    # Design matrix [bias, Math, Reading]; target is the Writing score.
    x = np.column_stack([data["one"], data["Math"], data["Reading"]])
    y = np.array(data["Writing"])
    learning_rate = 0.0001
    # Keep the name `model`: the class methods in this file resolve it
    # as a global at call time.
    model = LinearRegression()
    model.fit(x, y, learning_rate)
    print("Plotting loss")
    model.plot()

0 commit comments

Comments
 (0)