In [1]:
# Example 1: Kernel trick for linear regression
import numpy as np
import matplotlib.pyplot as plt
np.set_printoptions(precision=5, suppress=True)

# load data
x0 = np.loadtxt('/content/drive/MyDrive/PythonData/homework4_class0.txt')
x1 = np.loadtxt('/content/drive/MyDrive/PythonData/homework4_class1.txt')
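# (Optional) If the homework data files are not available locally, a
# hypothetical stand-in -- two synthetic Gaussian clusters -- keeps the rest
# of the cell runnable. This is an assumption, not the course data:
# rng = np.random.default_rng(0)
# x0 = rng.normal(loc=[0.0, 0.0], scale=1.0, size=(50, 2))
# x1 = rng.normal(loc=[4.0, 4.0], scale=1.0, size=(50, 2))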
N0 = x0.shape[0]
N1 = x1.shape[0]
N  = N0 + N1

# Stack the data; y holds the 0/1 class labels, X appends a constant bias column
y0 = np.zeros((N0, 1))
y1 = np.ones((N1, 1))
x = np.vstack((x0, x1))
y = np.vstack((y0, y1))
X = np.hstack((x, np.ones((N, 1))))

# Construct the N x N Gaussian kernel matrix K with bandwidth h.
# (The constant bias column of X cancels inside the difference, so it
# does not change the kernel values.)
h = 10
K = np.zeros((N, N))
for i in range(N):
  for j in range(N):
    K[i, j] = np.exp(-np.sum((X[i, :] - X[j, :])**2) / h)
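
# Equivalent vectorized construction (a sketch): the same kernel matrix via
# the identity ||xi - xj||^2 = ||xi||^2 + ||xj||^2 - 2*(xi . xj), which
# avoids the double loop.
sq = np.sum(X**2, axis=1)
K_vec = np.exp(-(sq[:, None] + sq[None, :] - 2.0 * (X @ X.T)) / h)
assert np.allclose(K, K_vec)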

# Solve the kernel (ridge) linear regression problem; see Lecture Slide #3
# (Year 2020). K + lambda*I is symmetric positive definite, so a direct
# solve is cheaper and more stable than forming a pseudo-inverse.
lambd = 0.01
w = np.linalg.solve(K + lambd*np.eye(N), y)
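
# Sanity check: the coefficients satisfy the regularized normal equations
# (K + lambda*I) w = y, up to floating-point error.
assert np.allclose((K + lambd*np.eye(N)) @ w, y)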

# Evaluate the learned function on a 100 x 100 grid of test sites
xset = np.linspace(-5, 10, 100)
yset = np.linspace(-5, 10, 100)
output = np.zeros((100, 100))
for i in range(100):
  for j in range(100):
    # kernel vector between the test point and all N training points
    data = np.tile(np.array([xset[j], yset[i], 1]).reshape((1, 3)), (N, 1))
    phi  = np.exp(-np.sum((X - data)**2, axis=1) / h)
    output[i, j] = (phi @ w).item()
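
# Vectorized alternative for the grid evaluation (a sketch, assuming scipy
# is available): compute all pairwise squared distances between grid points
# and training points at once.
from scipy.spatial.distance import cdist
XX, YY = np.meshgrid(xset, yset)              # XX[i,j]=xset[j], YY[i,j]=yset[i]
grid = np.column_stack((XX.ravel(), YY.ravel(), np.ones(XX.size)))
Phi = np.exp(-cdist(grid, X, 'sqeuclidean') / h)   # shape (10000, N)
output_vec = (Phi @ w).reshape(100, 100)
assert np.allclose(output, output_vec)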

# Display the data and the decision boundary (threshold 0.5). Contouring the
# continuous output at level 0.5 gives a smoother curve than contouring the
# boolean mask output > 0.5.
plt.scatter(x0[:,0], x0[:,1], marker='o', s=20)
plt.scatter(x1[:,0], x1[:,1], marker='+', s=60)
plt.contour(xset, yset, output, levels=[0.5], linewidths=2, colors='k')
Out[1]:
<matplotlib.contour.QuadContourSet at 0x7f4b0bb54790>
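In [ ]:
# (Optional, a sketch) The fitted values at the training points are K @ w,
# so the training accuracy at threshold 0.5 can be checked as follows:
yhat = (K @ w > 0.5).astype(float)
print('training accuracy:', float(np.mean(yhat == y)))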
In [ ]:
%%shell
jupyter nbconvert --to html /content/ECE595_lecture11.ipynb