Reproduce the results from the Python ML Tutorial, but with the following changes:
1. Linear regression: y = 5*sin(2*pi*t/8 + pi/9), where t = 100 values between 1 and 2 (see the data-generation sketch after this list)
2. Logistic regression: 4 classes (also sketched below):
   - class 1: centered at (2,2), std deviation = 2
   - class 2: centered at (10,10), std deviation = 3
   - class 3: centered at (2,10), std deviation = 5
   - class 4: centered at (10,5), std deviation = 3
3. For binary logistic regression use classes 1 and 2 (sketched after the binary logistic code below)
4. For k-binary and softmax use all classes (sketched after the k-binary and softmax code below)
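The tutorial's data-generation lines are the main thing that changes. Below is a minimal sketch (not part of the tutorial) of one way to build the new datasets, keeping the tutorial's x-pair naming style; the 20-points-per-class count is an assumption carried over from the tutorial code further down.

import numpy as np

# Item 1: t = 100 values between 1 and 2, y = 5*sin(2*pi*t/8 + pi/9)
t = np.linspace(1, 2, 100).reshape(100, 1)
y = 5*np.sin(2*np.pi*t/8 + np.pi/9)

# Item 2: four Gaussian classes (20 points per class is an assumption
# carried over from the tutorial code below)
x11 = np.random.normal(2, 2, 20).reshape(20,1)    # class 1, feature 1: center 2, std 2
x21 = np.random.normal(2, 2, 20).reshape(20,1)    # class 1, feature 2: center 2, std 2
x12 = np.random.normal(10, 3, 20).reshape(20,1)   # class 2, feature 1: center 10, std 3
x22 = np.random.normal(10, 3, 20).reshape(20,1)   # class 2, feature 2: center 10, std 3
x13 = np.random.normal(2, 5, 20).reshape(20,1)    # class 3, feature 1: center 2, std 5
x23 = np.random.normal(10, 5, 20).reshape(20,1)   # class 3, feature 2: center 10, std 5
x14 = np.random.normal(10, 3, 20).reshape(20,1)   # class 4, feature 1: center 10, std 3
x24 = np.random.normal(5, 3, 20).reshape(20,1)    # class 4, feature 2: center 5, std 3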
Code to modify:
import numpy as np
import matplotlib.pyplot as plt
a=10
b=90
x = (b-a)* np.random.random((100, 1)) + a
noise = 10*np.random.normal(size=x.shape)
slope = 2.5
y_int = 3.25
y = slope*x + y_int + noise
plt.scatter(x,y)
plt.plot(np.linspace(0,100,100), 3.25 + 2.5*np.linspace(0,100,100), 'r--')  # true line for comparison
plt.xlabel('x')
plt.ylabel('y')
plt.show()
m = len(x)
w = 10*np.random.random((2,1))
alpha = 0.0001
itera = 1000
dJdw0 = 1     # d(y_hat)/dw0
dJdw1 = x     # d(y_hat)/dw1
for i in range(itera):
    y_hat = w[0] + w[1]*x
    error = y_hat - y
    J = np.sum(error**2)/(2*m)
    w[0] = w[0] - alpha/m*np.sum(error*dJdw0)
    w[1] = w[1] - alpha/m*np.sum(error*dJdw1)
    print("iteration: %4d cost: %10.2f alpha: %10.8f w0: %10.2f w1: %10.2f" % (i, J, alpha, w[0,0], w[1,0]))
print("cost: %10.2f alpha: %10.8f w0: %10.2f w1: %10.2f" % (J, alpha, w[0,0], w[1,0]))
plt.scatter(x, y)
plt.plot(x, y_hat)
plt.plot(np.linspace(0,100,100), 3.25 + 2.5*np.linspace(0,100,100), 'r--')
plt.show()
X = np.ones((len(x), 2))
X[:,1] = x.ravel()            # second column holds the feature values
Y = y
W = np.dot(np.dot(np.linalg.inv(np.dot(X.T, X)), X.T), Y)   # normal equation
print(W)
X = x.reshape(100,)
Y = y.reshape(100,)
W = np.polyfit(X, Y, 1)       # coefficients returned highest power first
print(W)
order = 3
X = np.zeros((len(x), order+1))
for i in range(order+1):
    X[:,i] = (x**i).ravel()   # column i of the design matrix holds x**i
W = np.dot(np.dot(np.linalg.inv(np.dot(X.T, X)), X.T), Y)
print(W)
plt.scatter(x, y)
xs = np.sort(x, axis=0)       # sort a copy so x itself is left untouched
X = np.zeros((len(x), order+1))
for i in range(order+1):
    X[:,i] = (xs**i).ravel()
W = W.reshape(order+1, 1)
y_hat = np.dot(X, W)
plt.plot(xs, y_hat)           # plot against the sorted xs so the curve is smooth
plt.show()
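Since item 1 swaps the line-plus-noise data for the sinusoid, the same fitting machinery applies after substituting t for x. A short sketch using np.polyfit as the most compact variant; the gradient-descent and normal-equation versions above work identically after the same substitution.

# Item 1 applied to the fits above: substitute the sinusoid data for (x, y)
t = np.linspace(1, 2, 100)
y_sin = 5*np.sin(2*np.pi*t/8 + np.pi/9)
W1 = np.polyfit(t, y_sin, 1)     # straight-line fit
W3 = np.polyfit(t, y_sin, 3)     # order-3 fit, as in the tutorial's polynomial step
print(W1, W3)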
x11 = np.random.normal(10, 2, 20).reshape(20,1)
x21 = np.random.normal(5, 2, 20).reshape(20,1)
x12 = np.random.normal(5, 3, 20).reshape(20,1)
x22 = np.random.normal(10, 3, 20).reshape(20,1)
X1 = np.hstack((np.ones((20,1)),x11,x21))
X2 = np.hstack((np.ones((20,1)),x12,x22))
X = np.vstack((X1, X2))
Y = np.vstack((np.ones((20,1)), np.zeros((20,1))))
plt.plot(x11, x21, 'ro', x12, x22, 'bo')
plt.show()
alpha = 0.01
itera = 10000
m = Y.shape[0]
W = np.random.random((3,1))
for i in range(itera):
    Z = np.dot(X, W)
    H = 1 / (1 + np.exp(-Z))                          # sigmoid
    L = -np.sum(Y*np.log(H) + (1-Y)*np.log(1-H))      # cross-entropy loss
    dW = np.dot(X.T, (H - Y)) / m
    W = W - alpha*dW
y1 = np.array([np.min(X[:,1]), np.max(X[:,1])])       # boundary endpoints over feature 1
y2 = -((W[0,0] + W[1,0]*y1)/W[2,0])                   # decision boundary: w0 + w1*x1 + w2*x2 = 0
plt.plot(x11, x21, 'ro', x12, x22, 'bo')
plt.plot(y1, y2, '--')
plt.show()
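For item 3 the training loop above runs unchanged; only the two clusters change. A minimal sketch, assuming x11/x21 and x12/x22 are the spec-class versions from the data-generation sketch near the top:

# Item 3: binary logistic regression on spec classes 1 and 2
X1 = np.hstack((np.ones((20,1)), x11, x21))   # class 1: center (2,2), std 2
X2 = np.hstack((np.ones((20,1)), x12, x22))   # class 2: center (10,10), std 3
X = np.vstack((X1, X2))
Y = np.vstack((np.ones((20,1)), np.zeros((20,1))))
# ...then rerun the gradient-descent loop and boundary plot above unchanged.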
x13 = np.random.normal(10, 2, 20).reshape(20,1)
x23 = np.random.normal(15, 3, 20).reshape(20,1)
X3 = np.hstack([np.ones((20,1)), x13, x23])
X = np.vstack((X1, X2, X3))
plt.plot(x11, x21, 'ro', x12, x22, 'bo', x13, x23, 'go')
plt.show()
classes = 3
alpha = 0.01
itera = 10000
for c in range(classes):
    # one-vs-all: label the current class 1 and everything else 0
    Y = np.zeros((60,1))
    a = 20*c
    b = 20*(c+1)
    Y[a:b,:] = np.ones((20,1))
    W = np.random.random((3,1))
    m = Y.shape[0]
    for i in range(itera):
        Z = np.dot(X, W)
        H = 1 / (1 + np.exp(-Z))
        L = -np.sum(Y*np.log(H) + (1-Y)*np.log(1-H))
        dW = np.dot(X.T, (H - Y)) / m
        W = W - alpha*dW
    y1 = np.array([np.min(X[:,1]), np.max(X[:,1])])
    y2 = -((W[0,0] + W[1,0]*y1)/W[2,0])
    plt.plot(X[:,1], X[:,2], 'go', X[a:b,1], X[a:b,2], 'ro')
    plt.plot(y1, y2, '--')
    plt.show()
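Item 4 extends this one-vs-all loop from 3 to 4 classes, so the stacked data has 80 rows. A sketch, assuming X3 and X4 are bias-augmented spec classes 3 and 4 built the same way as X1 and X2 above:

# Item 4 (k-binary): four classes, 80 stacked rows
X = np.vstack((X1, X2, X3, X4))   # X4 assumed built from x14/x24 as above
classes = 4
for c in range(classes):
    Y = np.zeros((80,1))          # 4 classes x 20 points
    a = 20*c
    b = 20*(c+1)
    Y[a:b,:] = np.ones((20,1))
    # ...train W and plot its boundary exactly as in the loop above.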
plt.figure()
x11 = np.random.normal(10, 2, 20).reshape(20,1)
x21 = np.random.normal(5, 2, 20).reshape(20,1)
x12 = np.random.normal(5, 3, 20).reshape(20,1)
x22 = np.random.normal(10, 3, 20).reshape(20,1)
x13 = np.random.normal(10, 2, 20).reshape(20,1)
x23 = np.random.normal(15, 3, 20).reshape(20,1)
X1 = np.hstack((x11,x21))
X2 = np.hstack((x12,x22))
X3 = np.hstack((x13,x23))
X = np.vstack((X1, X2, X3))
Y = np.vstack((np.zeros((20,1)), np.ones((20,1)), 2*np.ones((20,1))))
from sklearn.linear_model import LogisticRegression
softmax_reg = LogisticRegression(multi_class="multinomial", solver="lbfgs", C=10)
logreg = softmax_reg.fit(X, Y.reshape(60,))
x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
h = 0.02  # step size in the mesh
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
Z = logreg.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.figure(1, figsize=(4, 3))
plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
plt.plot(X[0:20, 0], X[0:20, 1], 'ro', X[20:40, 0], X[20:40, 1], 'bo', X[40:60, 0], X[40:60, 1], 'go')
plt.show()
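For the softmax part of item 4 the sklearn call is unchanged; only the stacked data and labels grow to four classes. A sketch, assuming X1..X4 are the (20,2) spec-class feature arrays (no bias column), built like X1..X3 just above:

# Item 4 (softmax): four spec classes
X = np.vstack((X1, X2, X3, X4))   # X4 assumed stacked from x14, x24
Y = np.vstack((np.zeros((20,1)), np.ones((20,1)), 2*np.ones((20,1)), 3*np.ones((20,1))))
softmax_reg = LogisticRegression(multi_class="multinomial", solver="lbfgs", C=10)
logreg = softmax_reg.fit(X, Y.reshape(80,))
# ...the meshgrid/pcolormesh plotting above then works unchanged, with a fourth
# marker color added for the points X[60:80].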