-
Notifications
You must be signed in to change notification settings - Fork 0
/
Siraj LR-GD.py
54 lines (37 loc) · 1.17 KB
/
Siraj LR-GD.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
from numpy import *
def compute_error_for_given_points(b, m, points):
    """Return the mean squared error of the line y = m*x + b over points.

    Parameters:
        b: y-intercept of the candidate line.
        m: slope of the candidate line.
        points: 2-D array-like of shape (n, 2); column 0 holds x values,
            column 1 holds y values (NumPy-style tuple indexing is used).

    Note: the original signature read ``(b,m points)`` — a missing comma
    that made the file a SyntaxError; fixed here.
    """
    totalError = 0
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        totalError += (y - (m * x + b)) ** 2
    # Mean (not sum) so the error is comparable across dataset sizes.
    return totalError / float(len(points))
def step_gradient(b_current, m_current, points, learning_rate):
    """Perform one gradient-descent update for the line y = m*x + b.

    Parameters:
        b_current: current y-intercept.
        m_current: current slope.
        points: 2-D array-like of shape (n, 2); column 0 is x, column 1 is y.
        learning_rate: step size multiplier for the update.

    Returns:
        [new_b, new_m] after a single descent step on the MSE cost.
    """
    b_gradient = 0
    m_gradient = 0
    N = float(len(points))
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        # Residual of the current model. BUG FIX: the original computed the
        # prediction as (m_current * x) + m_current — the intercept term must
        # be b_current, otherwise b never influences the gradients correctly.
        error = y - ((m_current * x) + b_current)
        # Partial derivatives of mean squared error w.r.t. b and m.
        b_gradient += -(2 / N) * error
        m_gradient += -(2 / N) * x * error
    new_b = b_current - (learning_rate * b_gradient)
    new_m = m_current - (learning_rate * m_gradient)
    return [new_b, new_m]
def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    """Run num_iterations gradient-descent steps from the given start point.

    Parameters:
        points: 2-D array-like of (x, y) rows passed through to step_gradient.
        starting_b: initial y-intercept.
        starting_m: initial slope.
        learning_rate: step size forwarded to step_gradient.
        num_iterations: number of descent steps to perform.

    Returns:
        [b, m] after the final step.
    """
    # BUG FIX: the original initialized b from starting_m, silently
    # discarding the caller's starting_b.
    b = starting_b
    m = starting_m
    for i in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
    return [b, m]
def run():
    """Fit a straight line to data.csv by gradient descent and print it.

    Expects a file named data.csv in the working directory with two
    comma-separated columns per row: x, y. Prints the fitted intercept b
    and slope m after 1000 iterations.
    """
    points = genfromtxt('data.csv', delimiter=',')
    learning_rate = 0.0001
    initial_b = 0  # typo "intial_b" fixed (local to this function)
    initial_m = 0
    num_iterations = 1000
    # BUG FIX: the original call was missing its closing parenthesis,
    # which made the whole file a SyntaxError.
    [b, m] = gradient_descent_runner(points, initial_b, initial_m,
                                     learning_rate, num_iterations)
    print(b)
    print(m)


if __name__ == '__main__':
    run()