Demo Lecture 07 Bayesian Decision Rule

Feb-11, 2021

In [ ]:
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as stats

#  Generating the data
mu0 = np.array([0,0])
mu1 = np.array([5,5])
sigma  = 2
Sigma0 = (sigma**2) * np.array([[1,0],[0,1]])
Sigma1 = (sigma**2) * np.array([[1,0],[0,1]])
pi0 = 0.5
pi1 = 0.5

x_class0 = np.random.multivariate_normal(mu0,Sigma0,100)
x_class1 = np.random.multivariate_normal(mu1,Sigma1,100)

plt.plot(x_class0[:,0], x_class0[:,1],'x')
plt.plot(x_class1[:,0], x_class1[:,1],'ro')
Out[ ]:
[<matplotlib.lines.Line2D at 0x7fecff867908>]
In [ ]:
# Compute the separating hyperplane
w = (mu1-mu0)/sigma**2
w0 = -(np.linalg.norm(mu1)**2-np.linalg.norm(mu0)**2)/(2*(sigma**2)) + np.log(pi1/pi0)

# Visualize the separating hyperplane
x = np.linspace(-3,8,100)
y = -(w[0]/w[1])*x - w0/w[1]

plt.plot(x_class0[:,0], x_class0[:,1],'x')
plt.plot(x_class1[:,0], x_class1[:,1],'ro')
plt.plot(x,y,'k',linewidth=4)

# New testing sample
x_test = np.array([-3,-4])
g_x    = np.dot(w,x_test) + w0
print("Predicted Class is", np.sign(g_x).astype(int))
Predicted Class is -1