aboutsummaryrefslogtreecommitdiff
path: root/linear_regression/polynomial.py
blob: a62474ac28e757393eab03d2837006e324b542bb (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
#!/usr/bin/env python
# Fit a degree-2 polynomial y = w1 + w2*x + w3*x^2 to data by batch
# gradient descent, animating the fit with matplotlib and saving a GIF.

import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
import numpy as np

# Load the training data: CSV with two columns, "x" (feature) and "y" (target).
csv="../data/polynomial.csv"
data=pd.read_csv(csv)
x=np.array(data["x"])
y=np.array(data["y"])

# Initialize all three model weights to the same starting value (10).
w1=w2=w3=10

# The hypothesis / model function
def h(x):
    """Evaluate the quadratic model w1 + w2*x + w3*x**2.

    The weights w1, w2, w3 are module-level globals updated by the
    gradient-descent loop; x may be a scalar or a NumPy array.
    """
    linear_term = w2 * x
    quadratic_term = w3 * (x ** 2)
    return w1 + linear_term + quadratic_term

# Partial derivatives of the cost with respect to each weight
def dh1():
    """Gradient of the cost w.r.t. w1 (the bias term): mean residual."""
    residuals = h(x) - y
    return 1 / len(x) * np.sum(residuals)
def dh2():
    """Gradient of the cost w.r.t. w2 (linear coefficient)."""
    residuals = h(x) - y
    return 1 / len(x) * np.sum(residuals * x)
def dh3():
    """Gradient of the cost w.r.t. w3 (quadratic coefficient)."""
    residuals = h(x) - y
    return 1 / len(x) * np.sum(residuals * (x ** 2))

# Set up the figure and the gradient-descent state shared across frames
fig, ax = plt.subplots()
frame=0 # Iterations accumulated since the last redraw (plot animation)
alpha=0.005 # Learning rate: proportion of the gradient to take into account
accuracy=0.000001 # Convergence threshold on per-step weight change
done=False # Set to True once descent converges; stops the frame generator
def decent(i):
    """Run gradient-descent steps for one animation frame.

    Iterates descent updates until 1000 iterations have accumulated,
    then redraws the data scatter and the current model curve.  Sets the
    global `done` flag once every weight changed by at most `accuracy`
    in a single step, which lets IsDone() stop the animation.

    i -- frame index supplied by FuncAnimation (unused).
    """
    # Bug fix: `done` must be declared global, otherwise the assignment
    # below creates a dead local and IsDone() never terminates.
    global w1,w2,w3,x,y,frame,done
    while True:
        w1_old=w1
        w1_new=w1-alpha*dh1()
        w2_old=w2
        w2_new=w2-alpha*dh2()
        w3_old=w3
        w3_new=w3-alpha*dh3()
        w1=w1_new
        w2=w2_new
        w3=w3_new

        # Converged when no weight moved by more than `accuracy`.
        # Bug fix: the original tested the w2 delta twice and never checked w3.
        if (abs(w1_new-w1_old) <= accuracy
                and abs(w2_new-w2_old) <= accuracy
                and abs(w3_new-w3_old) <= accuracy):
            done=True
        frame+=1
        if frame >=1000:
            # Redraw every 1000 iterations, then yield control back to
            # FuncAnimation so the frame is rendered.
            frame=0
            ax.clear()
            ax.set_xlim([0, 7])
            ax.set_ylim([0, 5])
            ax.plot(x,y,"ro")
            ax.plot(x,h(x))
            break

def IsDone():
    """Frame generator for FuncAnimation.

    Yields an increasing frame counter until the gradient descent sets
    the module-level `done` flag, which ends the animation.
    """
    counter = 0
    while not done:
        counter += 1
        yield counter
        
# Drive the animation: decent() is called once per frame, with frames
# supplied by the IsDone generator (stops when descent converges).
# NOTE(review): the "imagemagick" writer requires ImageMagick to be installed.
anim=FuncAnimation(fig,decent,frames=IsDone,repeat=False)
anim.save('polynomial.gif',dpi=80,writer="imagemagick")