Demo 1 - Fitting a straight line

In [12]:
# Visualize noisy samples from a ground-truth line y = theta[0] + theta[1]*x
import numpy as np
import matplotlib.pyplot as plt

np.random.seed(0)              # fix the seed so the demo is reproducible on re-run
N = 50                         # number of samples
x = np.linspace(-1, 1, N)      # inputs on [-1, 1]
theta = np.array([-4, 2])      # true intercept and slope
# linear model plus i.i.d. Gaussian noise (std = 0.2)
y = theta[0] + theta[1]*x + 0.2*np.random.randn(N)
plt.plot(x, y, 'o', markersize=12)
plt.xlabel('x')
plt.ylabel('y')
plt.show()                     # suppress the bare [<Line2D>] repr in the cell output
Out[12]:
[<matplotlib.lines.Line2D at 0x7f89881bf9e8>]
In [13]:
# Least-squares fit of a straight line to the (x, y) samples
M = 200                                        # number of points on the plotting grid
A = np.column_stack((np.ones(N), x))           # design matrix [1, x]
beta = np.linalg.lstsq(A, y, rcond=None)[0]    # solve min_beta ||A beta - y||^2
t = np.linspace(-1, 1, M)                      # dense grid for drawing the fit
yhat = beta[0] + beta[1]*t                     # fitted line evaluated on the grid
plt.plot(x, y, 'o', markersize=12)
plt.plot(t, yhat, linewidth=8)
plt.show()

Demo 2 - Fitting a polynomial

In [14]:
# Visualize noisy data generated from a Legendre-polynomial basis
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import eval_legendre

N = 50
x = np.linspace(-1, 1, N)                      # sample locations on [-1, 1]
a = np.array([-0.001, 0.01, 0.55, 1.5, 1.2])   # true coefficients a_k of P_k(x)
# y = sum_k a_k P_k(x) + Gaussian noise (std = 0.2).
# Summing over range(len(a)) replaces five copy-pasted terms and
# works for a coefficient vector of any length.
y = sum(a[k]*eval_legendre(k, x) for k in range(len(a))) \
    + 0.2*np.random.randn(N)
plt.plot(x, y, 'o', markersize=12)
plt.show()                                     # suppress the bare [<Line2D>] repr
Out[14]:
[<matplotlib.lines.Line2D at 0x7f89880f94e0>]
In [15]:
# Fit the data with a degree-4 polynomial
# (simple monomial basis 1, t, t^2, t^3, t^4 -- the original comment
# wrongly said "straight line")
M = 200
deg = 4                                              # polynomial degree of the fit
A = np.column_stack([x**k for k in range(deg + 1)])  # Vandermonde-style design matrix
beta = np.linalg.lstsq(A, y, rcond=None)[0]          # least-squares coefficients
t = np.linspace(-1, 1, M)                            # dense grid for plotting the fit
# evaluate the fitted polynomial sum_k beta_k t^k on the grid
yhat = sum(beta[k]*t**k for k in range(deg + 1))
plt.plot(x, y, 'o', markersize=12)
plt.plot(t, yhat, linewidth=8)
plt.show()
In [17]:
%%shell
# Export this notebook to a standalone HTML file (Colab-specific %%shell cell magic).
# NOTE(review): /content/... is a hardcoded Colab path -- will not exist elsewhere.
jupyter nbconvert --to html /content/ECE595_demo_linear_regression.ipynb
[NbConvertApp] Converting notebook /content/ECE595_demo_linear_regression.ipynb to html
[NbConvertApp] Writing 286267 bytes to /content/ECE595_demo_linear_regression.html
Out[17]: